You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by da...@apache.org on 2017/03/09 09:23:03 UTC
[01/19] lucene-solr:jira/solr-9835: SOLR-9986: Fix precommit
Repository: lucene-solr
Updated Branches:
refs/heads/jira/solr-9835 00a1857f2 -> 9c91418da
SOLR-9986: Fix precommit
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/7af6cc97
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/7af6cc97
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/7af6cc97
Branch: refs/heads/jira/solr-9835
Commit: 7af6cc97151d727cb4d4e8730491a32e56a29397
Parents: 3131ec2
Author: Cao Manh Dat <da...@apache.org>
Authored: Tue Mar 7 15:37:24 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Tue Mar 7 15:37:24 2017 +0700
----------------------------------------------------------------------
solr/core/src/java/org/apache/solr/request/NumericFacets.java | 1 -
solr/core/src/java/org/apache/solr/schema/DatePointField.java | 1 -
.../solr/update/processor/ParsingFieldUpdateProcessorsTest.java | 1 -
3 files changed, 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7af6cc97/solr/core/src/java/org/apache/solr/request/NumericFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/NumericFacets.java b/solr/core/src/java/org/apache/solr/request/NumericFacets.java
index c3bcb9f..fd17f1f 100644
--- a/solr/core/src/java/org/apache/solr/request/NumericFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/NumericFacets.java
@@ -52,7 +52,6 @@ import org.apache.solr.schema.TrieField;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.SolrIndexSearcher;
-import org.apache.solr.util.DateMathParser;
/** Utility class to compute facets on numeric fields. */
final class NumericFacets {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7af6cc97/solr/core/src/java/org/apache/solr/schema/DatePointField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/DatePointField.java b/solr/core/src/java/org/apache/solr/schema/DatePointField.java
index 18bf651..b3517db 100644
--- a/solr/core/src/java/org/apache/solr/schema/DatePointField.java
+++ b/solr/core/src/java/org/apache/solr/schema/DatePointField.java
@@ -25,7 +25,6 @@ import java.util.Date;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.legacy.LegacyNumericRangeQuery;
import org.apache.lucene.legacy.LegacyNumericType;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/7af6cc97/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java b/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
index 31f4760..3aeb1fb 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/ParsingFieldUpdateProcessorsTest.java
@@ -18,7 +18,6 @@ package org.apache.solr.update.processor;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.schema.IndexSchema;
-import org.apache.solr.schema.PointField;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
[18/19] lucene-solr:jira/solr-9835: added a test
Posted by da...@apache.org.
added a test
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c680f45f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c680f45f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c680f45f
Branch: refs/heads/jira/solr-9835
Commit: c680f45f2d8ca126e2783c371e07b46bd16234c6
Parents: c85aac2
Author: Noble Paul <no...@apache.org>
Authored: Thu Mar 9 14:41:42 2017 +1030
Committer: Noble Paul <no...@apache.org>
Committed: Thu Mar 9 14:41:42 2017 +1030
----------------------------------------------------------------------
.../TestRuleBasedAuthorizationPlugin.java | 23 ++++++++++++++++++++
1 file changed, 23 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c680f45f/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java b/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
index 03656c5..4cdc555 100644
--- a/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
+++ b/solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
@@ -305,6 +305,21 @@ public class TestRuleBasedAuthorizationPlugin extends SolrTestCaseJ4 {
"handler", new DumpRequestHandler(),
"params", new MapSolrParams(singletonMap("key", "VAL2")))
, FORBIDDEN);
+
+ checkRules(makeMap("resource", "/update",
+ "userPrincipal", "solr",
+ "requestType", RequestType.UNKNOWN,
+ "collectionRequests", "go",
+ "handler", new UpdateRequestHandler(),
+ "params", new MapSolrParams(singletonMap("key", "VAL2")))
+ , FORBIDDEN, (Map<String, Object>) Utils.fromJSONString( "{user-role:{" +
+ " admin:[admin_role]," +
+ " update:[update_role]," +
+ " solr:[read_role]}," +
+ " permissions:[" +
+ " {name:update, role:[admin_role,update_role]}," +
+ " {name:read, role:[admin_role,update_role,read_role]}" +
+ "]}"));
}
public void testEditRules() throws IOException {
@@ -438,5 +453,13 @@ public class TestRuleBasedAuthorizationPlugin extends SolrTestCaseJ4 {
}
}
+static String testPerms = "{user-role:{" +
+ " admin:[admin_role]," +
+ " update:[update_role]," +
+ " solr:[read_role]}," +
+ " permissions:[" +
+ " {name:update,role:[admin_role,update_role]}," +
+ " {name:read,role:[admin_role,update_role,read_role]" +
+ "]}";
}
[15/19] lucene-solr:jira/solr-9835: SOLR-8876: change morphline test
config files to work around 'importCommands' bug when using java9
Posted by da...@apache.org.
SOLR-8876: change morphline test config files to work around 'importCommands' bug when using java9
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8756be05
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8756be05
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8756be05
Branch: refs/heads/jira/solr-9835
Commit: 8756be05404758155b850748f807245fdaab6a8b
Parents: e35881a
Author: Chris Hostetter <ho...@apache.org>
Authored: Wed Mar 8 09:52:25 2017 -0700
Committer: Chris Hostetter <ho...@apache.org>
Committed: Wed Mar 8 09:52:46 2017 -0700
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
.../test/org/apache/solr/hadoop/MRUnitBase.java | 2 --
.../MapReduceIndexerToolArgumentParserTest.java | 1 -
.../solr/hadoop/MorphlineBasicMiniMRTest.java | 1 -
.../morphlines/cell/SolrCellMorphlineTest.java | 2 --
.../test-morphlines/loadSolrBasic.conf | 7 +++++-
.../test-morphlines/solrCellDocumentTypes.conf | 23 +++++++++++++++++++-
.../test-morphlines/solrCellJPGCompressed.conf | 17 ++++++++++++++-
.../test-files/test-morphlines/solrCellXML.conf | 11 +++++++++-
.../test-morphlines/tokenizeText.conf | 6 ++++-
.../tutorialReadAvroContainer.conf | 11 +++++++---
.../solr/AbstractSolrMorphlineTestBase.java | 2 --
.../solr/AbstractSolrMorphlineZkTestBase.java | 4 ----
13 files changed, 69 insertions(+), 20 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 0e78535..7285e4f 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -310,6 +310,8 @@ Other Changes
correct now if other drivers were installed before test execution (e.g., through IDE).
(hossman, Uwe Schindler)
+* SOLR-8876: change morphline test config files to work around 'importCommands' bug when using java9 (hossman)
+
================== 6.4.2 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MRUnitBase.java
----------------------------------------------------------------------
diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MRUnitBase.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MRUnitBase.java
index 73323b9..558d662 100644
--- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MRUnitBase.java
+++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MRUnitBase.java
@@ -23,7 +23,6 @@ import java.util.Locale;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
-import org.apache.lucene.util.Constants;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.hadoop.morphline.MorphlineMapRunner;
import org.apache.solr.morphlines.solr.AbstractSolrMorphlineTestBase;
@@ -38,7 +37,6 @@ public abstract class MRUnitBase extends SolrTestCaseJ4 {
@BeforeClass
public static void setupClass() throws Exception {
- assumeFalse("This test fails on Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)", Constants.JRE_IS_MINIMUM_JAVA9);
assumeFalse("This test fails on UNIX with Turkish default locale (https://issues.apache.org/jira/browse/SOLR-6387)",
new Locale("tr").getLanguage().equals(Locale.getDefault().getLanguage()));
solrHomeZip = SolrOutputFormat.createSolrHomeZip(new File(RESOURCES_DIR + "/solr/mrunit"));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MapReduceIndexerToolArgumentParserTest.java
----------------------------------------------------------------------
diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MapReduceIndexerToolArgumentParserTest.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MapReduceIndexerToolArgumentParserTest.java
index 5dfb5cc..1aebcf7 100644
--- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MapReduceIndexerToolArgumentParserTest.java
+++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MapReduceIndexerToolArgumentParserTest.java
@@ -57,7 +57,6 @@ public class MapReduceIndexerToolArgumentParserTest extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() {
- assumeFalse("This test fails on Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)", Constants.JRE_IS_MINIMUM_JAVA9);
assumeFalse("Does not work on Windows, because it uses UNIX shell commands or POSIX paths", Constants.WINDOWS);
assumeFalse("This test fails on UNIX with Turkish default locale (https://issues.apache.org/jira/browse/SOLR-6387)",
new Locale("tr").getLanguage().equals(Locale.getDefault().getLanguage()));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
----------------------------------------------------------------------
diff --git a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
index b32f112..6479a20 100644
--- a/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
+++ b/solr/contrib/map-reduce/src/test/org/apache/solr/hadoop/MorphlineBasicMiniMRTest.java
@@ -121,7 +121,6 @@ public class MorphlineBasicMiniMRTest extends SolrTestCaseJ4 {
assumeFalse("HDFS tests were disabled by -Dtests.disableHdfs",
Boolean.parseBoolean(System.getProperty("tests.disableHdfs", "false")));
- assumeFalse("This test fails on Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)", Constants.JRE_IS_MINIMUM_JAVA9);
assumeFalse("FIXME: This test does not work with Windows because of native library requirements", Constants.WINDOWS);
AbstractZkTestCase.SOLRHOME = solrHomeDirectory;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java b/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
index 3607252..e0872b6 100644
--- a/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
+++ b/solr/contrib/morphlines-cell/src/test/org/apache/solr/morphlines/cell/SolrCellMorphlineTest.java
@@ -42,8 +42,6 @@ public class SolrCellMorphlineTest extends AbstractSolrMorphlineTestBase {
@BeforeClass
public static void beforeClass2() {
assumeFalse("FIXME: Morphlines currently has issues with Windows paths", Constants.WINDOWS);
- assumeFalse("This test fails with Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)",
- Constants.JRE_IS_MINIMUM_JAVA9);
}
@Before
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/loadSolrBasic.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/loadSolrBasic.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/loadSolrBasic.conf
index 190d0e4..1c02a9a 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/loadSolrBasic.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/loadSolrBasic.conf
@@ -39,7 +39,12 @@ SOLR_LOCATOR : ${?ENV_SOLR_LOCATOR}
morphlines : [
{
id : morphline1
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.apache.solr.morphlines.solr.SanitizeUnknownSolrFieldsBuilder",
+ "org.apache.solr.morphlines.solr.LoadSolrBuilder"]
commands : [
{
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellDocumentTypes.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellDocumentTypes.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellDocumentTypes.conf
index 7d232dd..4d38256 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellDocumentTypes.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellDocumentTypes.conf
@@ -22,7 +22,28 @@
morphlines : [
{
id : morphline1
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.kitesdk.morphline.stdlib.SetValuesBuilder",
+ "org.kitesdk.morphline.stdlib.CallParentPipeBuilder",
+ "org.kitesdk.morphline.stdlib.GenerateUUIDBuilder",
+ "org.kitesdk.morphline.stdlib.JavaBuilder",
+ "org.kitesdk.morphline.stdlib.TryRulesBuilder",
+ "org.kitesdk.morphline.stdlib.SeparateAttachmentsBuilder",
+ "org.kitesdk.morphline.stdio.ReadCSVBuilder",
+ "org.kitesdk.morphline.avro.ReadAvroContainerBuilder",
+ "org.kitesdk.morphline.avro.ExtractAvroPathsBuilder",
+ "org.kitesdk.morphline.avro.ExtractAvroTreeBuilder",
+ "org.kitesdk.morphline.tika.DetectMimeTypeBuilder",
+ "org.kitesdk.morphline.tika.decompress.DecompressBuilder",
+ "org.kitesdk.morphline.tika.decompress.UnpackBuilder",
+ "org.kitesdk.morphline.twitter.ReadJsonTestTweetsBuilder",
+ "org.apache.solr.morphlines.cell.SolrCellBuilder",
+ "org.apache.solr.morphlines.solr.SanitizeUnknownSolrFieldsBuilder",
+ "org.apache.solr.morphlines.solr.GenerateSolrSequenceKeyBuilder",
+ "org.apache.solr.morphlines.solr.LoadSolrBuilder"]
commands : [
{ separateAttachments {} }
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellJPGCompressed.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellJPGCompressed.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellJPGCompressed.conf
index 66e7d40..85cb2a7 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellJPGCompressed.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellJPGCompressed.conf
@@ -25,7 +25,22 @@
morphlines : [
{
id : morphline1
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.kitesdk.morphline.stdlib.CallParentPipeBuilder",
+ "org.kitesdk.morphline.stdlib.GenerateUUIDBuilder",
+ "org.kitesdk.morphline.stdlib.JavaBuilder",
+ "org.kitesdk.morphline.stdlib.TryRulesBuilder",
+ "org.kitesdk.morphline.stdlib.SeparateAttachmentsBuilder",
+ "org.kitesdk.morphline.tika.DetectMimeTypeBuilder",
+ "org.kitesdk.morphline.tika.decompress.DecompressBuilder",
+ "org.kitesdk.morphline.tika.decompress.UnpackBuilder",
+ "org.apache.solr.morphlines.cell.SolrCellBuilder",
+ "org.apache.solr.morphlines.solr.GenerateSolrSequenceKeyBuilder",
+ "org.apache.solr.morphlines.solr.LoadSolrBuilder"]
+
commands : [
{ separateAttachments {} }
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellXML.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellXML.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellXML.conf
index 43009bd..9e840a9 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellXML.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/solrCellXML.conf
@@ -25,7 +25,16 @@
morphlines : [
{
id : morphline1
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.kitesdk.morphline.stdlib.AddValuesBuilder",
+ "org.apache.solr.morphlines.cell.SolrCellBuilder",
+ "org.apache.solr.morphlines.solr.GenerateSolrSequenceKeyBuilder",
+ "org.apache.solr.morphlines.solr.SanitizeUnknownSolrFieldsBuilder",
+ "org.apache.solr.morphlines.solr.LoadSolrBuilder"]
+
commands : [
{
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/tokenizeText.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/tokenizeText.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/tokenizeText.conf
index 9b62276..d9354c4 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/tokenizeText.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/tokenizeText.conf
@@ -16,7 +16,11 @@
morphlines : [
{
id : morphline1
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.apache.solr.morphlines.solr.TokenizeTextBuilder"]
commands : [
{
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test-files/test-morphlines/tutorialReadAvroContainer.conf
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test-files/test-morphlines/tutorialReadAvroContainer.conf b/solr/contrib/morphlines-core/src/test-files/test-morphlines/tutorialReadAvroContainer.conf
index 0c00686..eee4ba5 100644
--- a/solr/contrib/morphlines-core/src/test-files/test-morphlines/tutorialReadAvroContainer.conf
+++ b/solr/contrib/morphlines-core/src/test-files/test-morphlines/tutorialReadAvroContainer.conf
@@ -42,9 +42,14 @@ morphlines : [
# morphline config file
id : morphline1
- # Import all morphline commands in these java packages and their subpackages.
- # Other commands that may be present on the classpath are not visible to this morphline.
- importCommands : ["org.kitesdk.**", "org.apache.solr.**"]
+ # using globs (foo.bar.* or foo.bar.**) will not work in Java9 due to classpath scanning limitations
+ # so we enumerate every command (builder) we know this config uses below. (see SOLR-8876)
+ importCommands : ["org.kitesdk.morphline.stdlib.LogDebugBuilder",
+ "org.kitesdk.morphline.avro.ReadAvroContainerBuilder",
+ "org.kitesdk.morphline.avro.ExtractAvroPathsBuilder",
+ "org.kitesdk.morphline.stdlib.ConvertTimestampBuilder",
+ "org.apache.solr.morphlines.solr.SanitizeUnknownSolrFieldsBuilder",
+ "org.apache.solr.morphlines.solr.LoadSolrBuilder"]
commands : [
{
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
index 9a5791e..c91f31b 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineTestBase.java
@@ -20,7 +20,6 @@ import com.codahale.metrics.MetricRegistry;
import com.google.common.io.Files;
import com.typesafe.config.Config;
import org.apache.commons.io.FileUtils;
-import org.apache.lucene.util.Constants;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
@@ -85,7 +84,6 @@ public class AbstractSolrMorphlineTestBase extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() throws Exception {
- assumeFalse("This test fails on Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)", Constants.JRE_IS_MINIMUM_JAVA9);
// TODO: test doesn't work with some Locales, see SOLR-6458
savedLocale = Locale.getDefault();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8756be05/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java
----------------------------------------------------------------------
diff --git a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java
index 9aa27c4..31e7ebf 100644
--- a/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java
+++ b/solr/contrib/morphlines-core/src/test/org/apache/solr/morphlines/solr/AbstractSolrMorphlineZkTestBase.java
@@ -24,7 +24,6 @@ import java.util.Locale;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.ListMultimap;
import com.typesafe.config.Config;
-import org.apache.lucene.util.Constants;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
@@ -79,9 +78,6 @@ public abstract class AbstractSolrMorphlineZkTestBase extends SolrCloudTestCase
@BeforeClass
public static void setupClass() throws Exception {
- assumeFalse("This test fails on Java 9 (https://issues.apache.org/jira/browse/SOLR-8876)",
- Constants.JRE_IS_MINIMUM_JAVA9);
-
assumeFalse("This test fails on UNIX with Turkish default locale (https://issues.apache.org/jira/browse/SOLR-6387)",
new Locale("tr").getLanguage().equals(Locale.getDefault().getLanguage()));
[14/19] lucene-solr:jira/solr-9835: SOLR-10248: Merge
SolrTestCaseJ4's SolrIndexSearcher tracking into the ObjectReleaseTracker.
Posted by da...@apache.org.
SOLR-10248: Merge SolrTestCaseJ4's SolrIndexSearcher tracking into the ObjectReleaseTracker.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e35881a6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e35881a6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e35881a6
Branch: refs/heads/jira/solr-9835
Commit: e35881a63aa9de72cf5c539396266e0d0e676956
Parents: 6a6e303
Author: Mark Miller <ma...@apache.org>
Authored: Wed Mar 8 11:30:08 2017 -0500
Committer: Mark Miller <ma...@apache.org>
Committed: Wed Mar 8 11:44:23 2017 -0500
----------------------------------------------------------------------
.../apache/solr/search/SolrIndexSearcher.java | 3 +
.../java/org/apache/solr/SolrTestCaseJ4.java | 71 +++++---------------
2 files changed, 19 insertions(+), 55 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e35881a6/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 521324a..a7ee433 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -105,6 +105,7 @@ import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.ObjectReleaseTracker;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.DirectoryFactory.DirContext;
@@ -391,6 +392,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
readerStats = snapStatistics(reader);
// do this at the end since an exception in the constructor means we won't close
numOpens.incrementAndGet();
+ assert ObjectReleaseTracker.track(this);
}
/*
@@ -539,6 +541,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
// do this at the end so it only gets done if there are no exceptions
numCloses.incrementAndGet();
+ assert ObjectReleaseTracker.release(this);
}
/** Direct access to the IndexSchema for use with this searcher */
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e35881a6/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index a8c93d6..825e7c7 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -297,17 +297,10 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
if (suiteFailureMarker.wasSuccessful()) {
// if the tests passed, make sure everything was closed / released
if (!RandomizedContext.current().getTargetClass().isAnnotationPresent(SuppressObjectReleaseTracker.class)) {
- endTrackingSearchers(120, false);
- String orr = clearObjectTrackerAndCheckEmpty(120);
+ String orr = clearObjectTrackerAndCheckEmpty(20, false);
assertNull(orr, orr);
} else {
- endTrackingSearchers(15, false);
- String orr = ObjectReleaseTracker.checkEmpty();
- if (orr != null) {
- log.warn(
- "Some resources were not closed, shutdown, or released. This has been ignored due to the SuppressObjectReleaseTracker annotation, trying to close them now.");
- ObjectReleaseTracker.tryClose();
- }
+ clearObjectTrackerAndCheckEmpty(20, true);
}
}
resetFactory();
@@ -341,6 +334,13 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
* @return null if ok else error message
*/
public static String clearObjectTrackerAndCheckEmpty(int waitSeconds) {
+ return clearObjectTrackerAndCheckEmpty(waitSeconds, false);
+ }
+
+ /**
+ * @return null if ok else error message
+ */
+ public static String clearObjectTrackerAndCheckEmpty(int waitSeconds, boolean tryClose) {
int retries = 0;
String result;
do {
@@ -367,6 +367,13 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
log.info("------------------------------------------------------- Done waiting for tracked resources to be released");
+
+ if (tryClose && result != null && RandomizedContext.current().getTargetClass().isAnnotationPresent(SuppressObjectReleaseTracker.class)) {
+ log.warn(
+ "Some resources were not closed, shutdown, or released. This has been ignored due to the SuppressObjectReleaseTracker annotation, trying to close them now.");
+ ObjectReleaseTracker.tryClose();
+ }
+
ObjectReleaseTracker.clear();
return result;
@@ -580,52 +587,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
numOpens = numCloses = 0;
}
}
-
- public static void endTrackingSearchers(int waitSeconds, boolean failTest) {
- long endNumOpens = SolrIndexSearcher.numOpens.get();
- long endNumCloses = SolrIndexSearcher.numCloses.get();
-
- // wait a bit in case any ending threads have anything to release
- int retries = 0;
- while (endNumOpens - numOpens != endNumCloses - numCloses) {
- if (retries++ > waitSeconds) {
- break;
- }
- if (retries % 10 == 0) {
- log.info("Waiting for all SolrIndexSearchers to be released at end of test");
- if (retries > 10) {
- TraceFormatting tf = new TraceFormatting();
- Map<Thread,StackTraceElement[]> stacksMap = Thread.getAllStackTraces();
- Set<Entry<Thread,StackTraceElement[]>> entries = stacksMap.entrySet();
- for (Entry<Thread,StackTraceElement[]> entry : entries) {
- String stack = tf.formatStackTrace(entry.getValue());
- System.err.println(entry.getKey().getName() + ":\n" + stack);
- }
- }
- }
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {}
- endNumOpens = SolrIndexSearcher.numOpens.get();
- endNumCloses = SolrIndexSearcher.numCloses.get();
- }
-
- log.info("------------------------------------------------------- Done waiting for all SolrIndexSearchers to be released");
-
- SolrIndexSearcher.numOpens.getAndSet(0);
- SolrIndexSearcher.numCloses.getAndSet(0);
-
- if (endNumOpens-numOpens != endNumCloses-numCloses) {
- String msg = "ERROR: SolrIndexSearcher opens=" + (endNumOpens-numOpens) + " closes=" + (endNumCloses-numCloses);
- log.error(msg);
- // if it's TestReplicationHandler, ignore it. the test is broken and gets no love
- if ("TestReplicationHandler".equals(RandomizedContext.current().getTargetClass().getSimpleName())) {
- log.warn("TestReplicationHandler wants to fail!: " + msg);
- } else {
- if (failTest) fail(msg);
- }
- }
- }
/** Causes an exception matching the regex pattern to not be logged. */
public static void ignoreException(String pattern) {
[13/19] lucene-solr:jira/solr-9835: SOLR-10244: TestCoreDiscovery
fails if you run it as root.
Posted by da...@apache.org.
SOLR-10244: TestCoreDiscovery fails if you run it as root.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/6a6e3032
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/6a6e3032
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/6a6e3032
Branch: refs/heads/jira/solr-9835
Commit: 6a6e30329843a86de1063a2c8f324eb3f9dbfd91
Parents: 8a54929
Author: Mark Miller <ma...@gmail.com>
Authored: Wed Mar 8 10:23:21 2017 -0500
Committer: Mark Miller <ma...@gmail.com>
Committed: Wed Mar 8 10:23:46 2017 -0500
----------------------------------------------------------------------
solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/6a6e3032/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
index fa07de8..65d459a 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
@@ -337,6 +337,7 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
File toSet = new File(coreDir, "core1");
assumeTrue("Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false));
+ assumeFalse("Appears we are a super user, skip test", toSet.canRead());
CoreContainer cc = init();
try (SolrCore core1 = cc.getCore("core1");
SolrCore core2 = cc.getCore("core2")) {
@@ -362,6 +363,7 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
File toSet = new File(solrHomeDirectory, "cantReadDir");
assertTrue("Should have been able to make directory '" + toSet.getAbsolutePath() + "' ", toSet.mkdirs());
assumeTrue("Cannot make " + toSet + " non-readable. Test aborted.", toSet.setReadable(false, false));
+ assumeFalse("Appears we are a super user, skip test", toSet.canRead());
CoreContainer cc = init();
try (SolrCore core1 = cc.getCore("core1");
SolrCore core2 = cc.getCore("core2")) {
@@ -421,7 +423,7 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
new File(homeDir, "core1" + File.separator + CorePropertiesLocator.PROPERTIES_FILENAME));
assumeTrue("Cannot make " + homeDir + " non-readable. Test aborted.", homeDir.setReadable(false, false));
-
+ assumeFalse("Appears we are a super user, skip test", homeDir.canRead());
CoreContainer cc = null;
try {
cc = init();
[17/19] lucene-solr:jira/solr-9835: SOLR-10254: Fix pre-commit
Posted by da...@apache.org.
SOLR-10254: Fix pre-commit
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c85aac2a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c85aac2a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c85aac2a
Branch: refs/heads/jira/solr-9835
Commit: c85aac2a65472d0d80050a703c99844e694c1584
Parents: 682c6a7
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Mar 8 21:35:24 2017 -0500
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Mar 8 21:35:24 2017 -0500
----------------------------------------------------------------------
.../solr/client/solrj/io/stream/SignificantTermsStream.java | 9 ---------
1 file changed, 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c85aac2a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
index 2acee51..101a71d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
@@ -19,7 +19,6 @@ package org.apache.solr.client.solrj.io.stream;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
@@ -27,13 +26,10 @@ import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
-import java.util.Random;
-import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.io.SolrClientCache;
import org.apache.solr.client.solrj.io.Tuple;
@@ -48,11 +44,6 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.ZkCoreNodeProps;
-import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.NamedList;
[07/19] lucene-solr:jira/solr-9835: Add 6.4.2 back compat test indexes
Posted by da...@apache.org.
Add 6.4.2 back compat test indexes
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3a993396
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3a993396
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3a993396
Branch: refs/heads/jira/solr-9835
Commit: 3a9933960ec35e3083f261549dfed0e75fd8268c
Parents: 57e8543
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Wed Mar 8 01:21:45 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Wed Mar 8 01:21:45 2017 +0530
----------------------------------------------------------------------
.../lucene/index/TestBackwardsCompatibility.java | 4 +++-
.../org/apache/lucene/index/index.6.4.2-cfs.zip | Bin 0 -> 15856 bytes
.../org/apache/lucene/index/index.6.4.2-nocfs.zip | Bin 0 -> 15886 bytes
3 files changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3a993396/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index 57ce52a..1dda6b6 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -295,7 +295,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
"6.4.0-cfs",
"6.4.0-nocfs",
"6.4.1-cfs",
- "6.4.1-nocfs"
+ "6.4.1-nocfs",
+ "6.4.2-cfs",
+ "6.4.2-nocfs"
};
final String[] unsupportedNames = {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3a993396/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-cfs.zip
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-cfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-cfs.zip
new file mode 100644
index 0000000..eee89f4
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-cfs.zip differ
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3a993396/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-nocfs.zip
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-nocfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-nocfs.zip
new file mode 100644
index 0000000..d55a6f6
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.6.4.2-nocfs.zip differ
[08/19] lucene-solr:jira/solr-9835: LUCENE-7718:
buildAndPushRelease.py script should refer to working tree instead of
directory
Posted by da...@apache.org.
LUCENE-7718: buildAndPushRelease.py script should refer to working tree instead of directory
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a6e14ec6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a6e14ec6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a6e14ec6
Branch: refs/heads/jira/solr-9835
Commit: a6e14ec6d2d176f8363efc46b0685fda9a0942b2
Parents: 3a99339
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Tue Feb 28 21:22:17 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Wed Mar 8 01:49:13 2017 +0530
----------------------------------------------------------------------
dev-tools/scripts/buildAndPushRelease.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a6e14ec6/dev-tools/scripts/buildAndPushRelease.py
----------------------------------------------------------------------
diff --git a/dev-tools/scripts/buildAndPushRelease.py b/dev-tools/scripts/buildAndPushRelease.py
index e34c943..d742214 100644
--- a/dev-tools/scripts/buildAndPushRelease.py
+++ b/dev-tools/scripts/buildAndPushRelease.py
@@ -59,7 +59,7 @@ def runAndSendGPGPassword(command, password):
def getGitRev():
status = os.popen('git status').read().strip()
- if 'nothing to commit, working directory clean' not in status:
+ if 'nothing to commit, working directory clean' not in status and 'nothing to commit, working tree clean' not in status:
raise RuntimeError('git clone is dirty:\n\n%s' % status)
branch = os.popen('git rev-parse --abbrev-ref HEAD').read().strip()
command = 'git log origin/%s..' % branch
[19/19] lucene-solr:jira/solr-9835: Merge branch 'master' into
jira/solr-9835
Posted by da...@apache.org.
Merge branch 'master' into jira/solr-9835
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9c91418d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9c91418d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9c91418d
Branch: refs/heads/jira/solr-9835
Commit: 9c91418dae60b0731114a377a55f6ed1d837605d
Parents: 00a1857 c680f45
Author: Cao Manh Dat <da...@apache.org>
Authored: Thu Mar 9 16:22:46 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Thu Mar 9 16:22:46 2017 +0700
----------------------------------------------------------------------
dev-tools/doap/lucene.rdf | 7 +
dev-tools/doap/solr.rdf | 7 +
dev-tools/scripts/buildAndPushRelease.py | 2 +-
lucene/CHANGES.txt | 7 +
.../index/TestBackwardsCompatibility.java | 4 +-
.../org/apache/lucene/index/index.6.4.2-cfs.zip | Bin 0 -> 15856 bytes
.../apache/lucene/index/index.6.4.2-nocfs.zip | Bin 0 -> 15886 bytes
.../complexPhrase/ComplexPhraseQueryParser.java | 21 +-
.../queryparser/simple/SimpleQueryParser.java | 8 +-
.../complexPhrase/TestComplexPhraseQuery.java | 36 +-
.../simple/TestSimpleQueryParser.java | 2 +-
solr/CHANGES.txt | 18 +
.../handler/dataimport/TestJdbcDataSource.java | 80 +++-
.../test/org/apache/solr/hadoop/MRUnitBase.java | 2 -
.../MapReduceIndexerToolArgumentParserTest.java | 1 -
.../solr/hadoop/MorphlineBasicMiniMRTest.java | 1 -
.../morphlines/cell/SolrCellMorphlineTest.java | 2 -
.../test-morphlines/loadSolrBasic.conf | 7 +-
.../test-morphlines/solrCellDocumentTypes.conf | 23 +-
.../test-morphlines/solrCellJPGCompressed.conf | 17 +-
.../test-files/test-morphlines/solrCellXML.conf | 11 +-
.../test-morphlines/tokenizeText.conf | 6 +-
.../tutorialReadAvroContainer.conf | 11 +-
.../solr/AbstractSolrMorphlineTestBase.java | 2 -
.../solr/AbstractSolrMorphlineZkTestBase.java | 4 -
.../org/apache/solr/cloud/ElectionContext.java | 5 +-
.../java/org/apache/solr/cloud/Overseer.java | 7 +-
.../solr/cloud/OverseerNodePrioritizer.java | 2 +-
.../solr/cloud/OverseerTaskProcessor.java | 6 +-
.../org/apache/solr/cloud/ZkController.java | 2 +-
.../org/apache/solr/core/CoreContainer.java | 30 +-
.../org/apache/solr/core/JmxMonitoredMap.java | 9 +-
.../src/java/org/apache/solr/core/SolrCore.java | 4 +-
.../org/apache/solr/core/SolrInfoMBean.java | 4 +-
.../org/apache/solr/core/SolrXmlConfig.java | 3 +-
.../apache/solr/handler/RequestHandlerBase.java | 7 +-
.../org/apache/solr/handler/StreamHandler.java | 27 ++
.../handler/admin/MetricsCollectorHandler.java | 228 +++++++++++
.../solr/handler/admin/MetricsHandler.java | 2 +-
.../apache/solr/metrics/AggregateMetric.java | 200 ++++++++++
.../solr/metrics/SolrCoreMetricManager.java | 125 +++++-
.../apache/solr/metrics/SolrMetricManager.java | 325 ++++++++++++++-
.../metrics/reporters/JmxObjectNameFactory.java | 6 +-
.../reporters/solr/SolrClusterReporter.java | 277 +++++++++++++
.../metrics/reporters/solr/SolrReporter.java | 392 +++++++++++++++++++
.../reporters/solr/SolrShardReporter.java | 188 +++++++++
.../metrics/reporters/solr/package-info.java | 22 ++
.../org/apache/solr/request/NumericFacets.java | 1 -
.../org/apache/solr/schema/DatePointField.java | 1 -
.../apache/solr/search/SolrIndexSearcher.java | 3 +
.../java/org/apache/solr/update/PeerSync.java | 8 +-
.../org/apache/solr/util/stats/MetricUtils.java | 267 +++++++++----
.../src/test-files/solr/solr-solrreporter.xml | 66 ++++
.../apache/solr/cloud/TestCloudRecovery.java | 6 +-
.../org/apache/solr/core/TestCoreDiscovery.java | 4 +-
.../apache/solr/core/TestJmxMonitoredMap.java | 2 +-
.../solr/metrics/SolrCoreMetricManagerTest.java | 31 +-
.../solr/metrics/SolrMetricManagerTest.java | 30 +-
.../metrics/SolrMetricsIntegrationTest.java | 15 +-
.../metrics/reporters/SolrJmxReporterTest.java | 13 +-
.../reporters/solr/SolrCloudReportersTest.java | 163 ++++++++
.../reporters/solr/SolrShardReporterTest.java | 117 ++++++
.../TestRuleBasedAuthorizationPlugin.java | 23 ++
.../solr/update/TestInPlaceUpdatesDistrib.java | 19 +-
.../ParsingFieldUpdateProcessorsTest.java | 1 -
.../apache/solr/util/stats/MetricUtilsTest.java | 54 ++-
.../client/solrj/impl/BinaryRequestWriter.java | 4 +-
.../solr/client/solrj/io/SolrClientCache.java | 26 +-
.../solrj/io/stream/SignificantTermsStream.java | 58 +--
.../client/solrj/io/stream/TupleStream.java | 94 +++++
.../solrj/io/stream/StreamExpressionTest.java | 234 +++++++----
.../client/solrj/request/TestCoreAdmin.java | 4 +-
.../java/org/apache/solr/SolrTestCaseJ4.java | 71 +---
73 files changed, 3022 insertions(+), 443 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c91418d/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c91418d/solr/core/src/java/org/apache/solr/cloud/ZkController.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c91418d/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c91418d/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c91418d/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
----------------------------------------------------------------------
diff --cc solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
index a3ed5ae,4c90bc6..e098d8a
--- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
+++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
@@@ -91,13 -97,18 +98,23 @@@ public class TestInPlaceUpdatesDistrib
assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoSoftCommmitMaxTime);
assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoCommmitMaxDocs);
assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoSoftCommmitMaxDocs);
+
+ // assert that NoMergePolicy was chosen
+ RefCounted<IndexWriter> iw = h.getCore().getSolrCoreState().getIndexWriter(h.getCore());
+ try {
+ IndexWriter writer = iw.get();
+ assertTrue("Actual merge policy is: " + writer.getConfig().getMergePolicy(),
+ writer.getConfig().getMergePolicy() instanceof NoMergePolicy);
+ } finally {
+ iw.decref();
+ }
}
-
+
+ @Override
+ protected int getRealtimeReplicas() {
+ return onlyLeaderIndexes? 1 : -1;
+ }
+
@After
public void after() {
System.clearProperty("solr.tests.intClassName");
[03/19] lucene-solr:jira/solr-9835: SOLR-10178,
SOLR-10079: Force tests to always use NoMergePolicy,
also assert that it was used
Posted by da...@apache.org.
SOLR-10178, SOLR-10079: Force tests to always use NoMergePolicy, also assert that it was used
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/190f4b6b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/190f4b6b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/190f4b6b
Branch: refs/heads/jira/solr-9835
Commit: 190f4b6b935d39d5c08b9a23a07c9c891d197312
Parents: 21559fe
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Tue Mar 7 19:02:26 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Tue Mar 7 19:02:26 2017 +0530
----------------------------------------------------------------------
.../solr/update/TestInPlaceUpdatesDistrib.java | 19 ++++++++++++++++++-
1 file changed, 18 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/190f4b6b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
index b107cbd..4c90bc6 100644
--- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
+++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
@@ -30,6 +30,8 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.lucene.util.TestUtil;
import org.apache.solr.client.solrj.SolrClient;
@@ -55,6 +57,7 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.index.NoMergePolicyFactory;
import org.apache.solr.update.processor.DistributedUpdateProcessor;
import org.apache.solr.util.DefaultSolrThreadFactory;
+import org.apache.solr.util.RefCounted;
import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.BeforeClass;
@@ -82,7 +85,11 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
// we need consistent segments that aren't re-ordered on merge because we're
// asserting inplace updates happen by checking the internal [docid]
systemSetPropertySolrTestsMergePolicyFactory(NoMergePolicyFactory.class.getName());
-
+
+ // HACK: Don't use a RandomMergePolicy, but only use the mergePolicyFactory that we've just set
+ System.setProperty(SYSTEM_PROPERTY_SOLR_TESTS_USEMERGEPOLICYFACTORY, "true");
+ System.setProperty(SYSTEM_PROPERTY_SOLR_TESTS_USEMERGEPOLICY, "false");
+
initCore(configString, schemaString);
// sanity check that autocommits are disabled
@@ -90,6 +97,16 @@ public class TestInPlaceUpdatesDistrib extends AbstractFullDistribZkTestBase {
assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoSoftCommmitMaxTime);
assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoCommmitMaxDocs);
assertEquals(-1, h.getCore().getSolrConfig().getUpdateHandlerInfo().autoSoftCommmitMaxDocs);
+
+ // assert that NoMergePolicy was chosen
+ RefCounted<IndexWriter> iw = h.getCore().getSolrCoreState().getIndexWriter(h.getCore());
+ try {
+ IndexWriter writer = iw.get();
+ assertTrue("Actual merge policy is: " + writer.getConfig().getMergePolicy(),
+ writer.getConfig().getMergePolicy() instanceof NoMergePolicy);
+ } finally {
+ iw.decref();
+ }
}
@After
[16/19] lucene-solr:jira/solr-9835: SOLR-10254: significantTerms
Streaming Expression should work in non-SolrCloud mode
Posted by da...@apache.org.
SOLR-10254: significantTerms Streaming Expression should work in non-SolrCloud mode
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/682c6a7d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/682c6a7d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/682c6a7d
Branch: refs/heads/jira/solr-9835
Commit: 682c6a7d5145129e8ae01ff00505ddf5a564d396
Parents: 8756be0
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Mar 8 21:10:56 2017 -0500
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Mar 8 21:11:26 2017 -0500
----------------------------------------------------------------------
.../org/apache/solr/handler/StreamHandler.java | 27 +++
.../solrj/io/stream/SignificantTermsStream.java | 49 +---
.../client/solrj/io/stream/TupleStream.java | 94 ++++++++
.../solrj/io/stream/StreamExpressionTest.java | 234 +++++++++++++------
4 files changed, 285 insertions(+), 119 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/682c6a7d/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index 31b37e7..06e59b6 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -18,6 +18,7 @@ package org.apache.solr.handler;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@@ -246,6 +247,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
int worker = params.getInt("workerID", 0);
int numWorkers = params.getInt("numWorkers", 1);
StreamContext context = new StreamContext();
+ context.put("shards", getCollectionShards(params));
context.workerID = worker;
context.numWorkers = numWorkers;
context.setSolrClientCache(clientCache);
@@ -509,4 +511,29 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
return tuple;
}
}
+
+ private Map<String, List<String>> getCollectionShards(SolrParams params) {
+
+ Map<String, List<String>> collectionShards = new HashMap();
+ Iterator<String> paramsIt = params.getParameterNamesIterator();
+ while(paramsIt.hasNext()) {
+ String param = paramsIt.next();
+ if(param.indexOf(".shards") > -1) {
+ String collection = param.split("\\.")[0];
+ String shardString = params.get(param);
+ String[] shards = shardString.split(",");
+ List<String> shardList = new ArrayList();
+ for(String shard : shards) {
+ shardList.add(shard);
+ }
+ collectionShards.put(collection, shardList);
+ }
+ }
+
+ if(collectionShards.size() > 0) {
+ return collectionShards;
+ } else {
+ return null;
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/682c6a7d/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
index 87b5a9f..2acee51 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java
@@ -74,12 +74,9 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
protected transient SolrClientCache cache;
protected transient boolean isCloseCache;
- protected transient CloudSolrClient cloudSolrClient;
-
protected transient StreamContext streamContext;
protected ExecutorService executorService;
-
public SignificantTermsStream(String zkHost,
String collectionName,
Map params,
@@ -168,12 +165,12 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
String zkHost = null;
if(null == zkHostExpression){
zkHost = factory.getCollectionZkHost(collectionName);
- }
- else if(zkHostExpression.getParameter() instanceof StreamExpressionValue){
+ } else if(zkHostExpression.getParameter() instanceof StreamExpressionValue) {
zkHost = ((StreamExpressionValue)zkHostExpression.getParameter()).getValue();
}
- if(null == zkHost){
- throw new IOException(String.format(Locale.ROOT,"invalid expression %s - zkHost not found for collection '%s'",expression,collectionName));
+
+ if(zkHost == null){
+ zkHost = factory.getDefaultZkHost();
}
// We've got all the required items
@@ -238,47 +235,13 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
isCloseCache = false;
}
- this.cloudSolrClient = this.cache.getCloudSolrClient(zkHost);
- this.executorService = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrjNamedThreadFactory("FeaturesSelectionStream"));
+ this.executorService = ExecutorUtil.newMDCAwareCachedThreadPool(new SolrjNamedThreadFactory("SignificantTermsStream"));
}
public List<TupleStream> children() {
return null;
}
- private List<String> getShardUrls() throws IOException {
- try {
- ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader();
-
- Collection<Slice> slices = CloudSolrStream.getSlices(this.collection, zkStateReader, false);
-
- ClusterState clusterState = zkStateReader.getClusterState();
- Set<String> liveNodes = clusterState.getLiveNodes();
-
- List<String> baseUrls = new ArrayList<>();
- for(Slice slice : slices) {
- Collection<Replica> replicas = slice.getReplicas();
- List<Replica> shuffler = new ArrayList<>();
- for(Replica replica : replicas) {
- if(replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) {
- shuffler.add(replica);
- }
- }
-
- Collections.shuffle(shuffler, new Random());
- Replica rep = shuffler.get(0);
- ZkCoreNodeProps zkProps = new ZkCoreNodeProps(rep);
- String url = zkProps.getCoreUrl();
- baseUrls.add(url);
- }
-
- return baseUrls;
-
- } catch (Exception e) {
- throw new IOException(e);
- }
- }
-
private List<Future<NamedList>> callShards(List<String> baseUrls) throws IOException {
List<Future<NamedList>> futures = new ArrayList<>();
@@ -326,7 +289,7 @@ public class SignificantTermsStream extends TupleStream implements Expressible{
Map<String, int[]> mergeFreqs = new HashMap<>();
long numDocs = 0;
long resultCount = 0;
- for (Future<NamedList> getTopTermsCall : callShards(getShardUrls())) {
+ for (Future<NamedList> getTopTermsCall : callShards(getShards(zkHost, collection, streamContext))) {
NamedList resp = getTopTermsCall.get();
List<String> terms = (List<String>)resp.get("sterms");
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/682c6a7d/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java
index 49a806f..ceea6af 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TupleStream.java
@@ -19,9 +19,16 @@ package org.apache.solr.client.solrj.io.stream;
import java.io.Closeable;
import java.io.IOException;
import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
import java.util.List;
+import java.util.Random;
+import java.util.Set;
import java.util.UUID;
+import java.util.Map;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.comp.StreamComparator;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
@@ -29,6 +36,14 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
import org.apache.solr.common.IteratorWriter;
import org.apache.solr.common.MapWriter;
import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.Aliases;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkCoreNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.StrUtils;
public abstract class TupleStream implements Closeable, Serializable, MapWriter {
@@ -84,4 +99,83 @@ public abstract class TupleStream implements Closeable, Serializable, MapWriter
public UUID getStreamNodeId(){
return streamNodeId;
}
+
+ public static List<String> getShards(String zkHost,
+ String collection,
+ StreamContext streamContext)
+ throws IOException {
+ Map<String, List<String>> shardsMap = null;
+ List<String> shards = new ArrayList();
+
+ if(streamContext != null) {
+ shardsMap = (Map<String, List<String>>)streamContext.get("shards");
+ }
+
+ if(shardsMap != null) {
+ //Manual Sharding
+ shards = shardsMap.get(collection);
+ } else {
+ //SolrCloud Sharding
+ CloudSolrClient cloudSolrClient = streamContext.getSolrClientCache().getCloudSolrClient(zkHost);
+ ZkStateReader zkStateReader = cloudSolrClient.getZkStateReader();
+ ClusterState clusterState = zkStateReader.getClusterState();
+ Collection<Slice> slices = getSlices(collection, zkStateReader, true);
+ Set<String> liveNodes = clusterState.getLiveNodes();
+ for(Slice slice : slices) {
+ Collection<Replica> replicas = slice.getReplicas();
+ List<Replica> shuffler = new ArrayList<>();
+ for(Replica replica : replicas) {
+ if(replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName()))
+ shuffler.add(replica);
+ }
+
+ Collections.shuffle(shuffler, new Random());
+ Replica rep = shuffler.get(0);
+ ZkCoreNodeProps zkProps = new ZkCoreNodeProps(rep);
+ String url = zkProps.getCoreUrl();
+ shards.add(url);
+ }
+ }
+
+ return shards;
+ }
+
+ public static Collection<Slice> getSlices(String collectionName,
+ ZkStateReader zkStateReader,
+ boolean checkAlias) throws IOException {
+ ClusterState clusterState = zkStateReader.getClusterState();
+
+ Map<String, DocCollection> collectionsMap = clusterState.getCollectionsMap();
+
+ // Check collection case sensitive
+ if(collectionsMap.containsKey(collectionName)) {
+ return collectionsMap.get(collectionName).getActiveSlices();
+ }
+
+ // Check collection case insensitive
+ for(String collectionMapKey : collectionsMap.keySet()) {
+ if(collectionMapKey.equalsIgnoreCase(collectionName)) {
+ return collectionsMap.get(collectionMapKey).getActiveSlices();
+ }
+ }
+
+ if(checkAlias) {
+ // check for collection alias
+ Aliases aliases = zkStateReader.getAliases();
+ String alias = aliases.getCollectionAlias(collectionName);
+ if (alias != null) {
+ Collection<Slice> slices = new ArrayList<>();
+
+ List<String> aliasList = StrUtils.splitSmart(alias, ",", true);
+ for (String aliasCollectionName : aliasList) {
+ // Add all active slices for this alias collection
+ slices.addAll(collectionsMap.get(aliasCollectionName).getActiveSlices());
+ }
+
+ return slices;
+ }
+ }
+
+ throw new IOException("Slices not found for " + collectionName);
+ }
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/682c6a7d/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 30b7056..c61e443 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -335,7 +335,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
tuples = getTuples(stream);
assert(tuples.size() == 4);
- assertOrder(tuples, 4,3,1,2);
+ assertOrder(tuples, 4, 3, 1, 2);
// Basic w/multi comp
expression = StreamExpressionParser.parse("unique(search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\"), over=\"a_f, a_i\")");
@@ -1577,7 +1577,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
List<Tuple> tuples = getTuples(pstream);
assert(tuples.size() == 5);
- assertOrder(tuples, 0,1,3,4,6);
+ assertOrder(tuples, 0, 1, 3, 4, 6);
//Test the eofTuples
@@ -4712,8 +4712,6 @@ public class StreamExpressionTest extends SolrCloudTestCase {
@Test
public void testSignificantTermsStream() throws Exception {
- Assume.assumeTrue(!useAlias);
-
UpdateRequest updateRequest = new UpdateRequest();
for (int i = 0; i < 5000; i++) {
updateRequest.add(id, "a"+i, "test_t", "a b c d m l");
@@ -4742,106 +4740,186 @@ public class StreamExpressionTest extends SolrCloudTestCase {
StreamFactory factory = new StreamFactory()
.withCollectionZkHost("collection1", cluster.getZkServer().getZkAddress())
+ .withDefaultZkHost(cluster.getZkServer().getZkAddress())
.withFunctionName("significantTerms", SignificantTermsStream.class);
- String significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minTermLength=1, maxDocFreq=\".5\")";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
+ StreamContext streamContext = new StreamContext();
+ SolrClientCache cache = new SolrClientCache();
+ streamContext.setSolrClientCache(cache);
+ try {
- assert(tuples.size() == 3);
- assertTrue(tuples.get(0).get("term").equals("l"));
- assertTrue(tuples.get(0).getLong("background") == 5000);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ String significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minTermLength=1, maxDocFreq=\".5\")";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
+ assert (tuples.size() == 3);
+ assertTrue(tuples.get(0).get("term").equals("l"));
+ assertTrue(tuples.get(0).getLong("background") == 5000);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- assertTrue(tuples.get(1).get("term").equals("m"));
- assertTrue(tuples.get(1).getLong("background") == 5500);
- assertTrue(tuples.get(1).getLong("foreground") == 5000);
- assertTrue(tuples.get(2).get("term").equals("d"));
- assertTrue(tuples.get(2).getLong("background") == 5600);
- assertTrue(tuples.get(2).getLong("foreground") == 5000);
+ assertTrue(tuples.get(1).get("term").equals("m"));
+ assertTrue(tuples.get(1).getLong("background") == 5500);
+ assertTrue(tuples.get(1).getLong("foreground") == 5000);
- //Test maxDocFreq
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, maxDocFreq=2650, minTermLength=1)";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
+ assertTrue(tuples.get(2).get("term").equals("d"));
+ assertTrue(tuples.get(2).getLong("background") == 5600);
+ assertTrue(tuples.get(2).getLong("foreground") == 5000);
- assert(tuples.size() == 1);
- assertTrue(tuples.get(0).get("term").equals("l"));
- assertTrue(tuples.get(0).getLong("background") == 5000);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ //Test maxDocFreq
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, maxDocFreq=2650, minTermLength=1)";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
- //Test maxDocFreq percentage
+ assert (tuples.size() == 1);
+ assertTrue(tuples.get(0).get("term").equals("l"));
+ assertTrue(tuples.get(0).getLong("background") == 5000);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, maxDocFreq=\".45\", minTermLength=1)";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
- assert(tuples.size() == 1);
- assertTrue(tuples.get(0).get("term").equals("l"));
- assertTrue(tuples.get(0).getLong("background") == 5000);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ //Test maxDocFreq percentage
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, maxDocFreq=\".45\", minTermLength=1)";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
+ assert (tuples.size() == 1);
+ assertTrue(tuples.get(0).get("term").equals("l"));
+ assertTrue(tuples.get(0).getLong("background") == 5000);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- //Test min doc freq
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minDocFreq=\"2700\", minTermLength=1, maxDocFreq=\".5\")";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
- assert(tuples.size() == 3);
+ //Test min doc freq
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minDocFreq=\"2700\", minTermLength=1, maxDocFreq=\".5\")";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
- assertTrue(tuples.get(0).get("term").equals("m"));
- assertTrue(tuples.get(0).getLong("background") == 5500);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ assert (tuples.size() == 3);
- assertTrue(tuples.get(1).get("term").equals("d"));
- assertTrue(tuples.get(1).getLong("background") == 5600);
- assertTrue(tuples.get(1).getLong("foreground") == 5000);
+ assertTrue(tuples.get(0).get("term").equals("m"));
+ assertTrue(tuples.get(0).getLong("background") == 5500);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- assertTrue(tuples.get(2).get("term").equals("c"));
- assertTrue(tuples.get(2).getLong("background") == 5900);
- assertTrue(tuples.get(2).getLong("foreground") == 5000);
+ assertTrue(tuples.get(1).get("term").equals("d"));
+ assertTrue(tuples.get(1).getLong("background") == 5600);
+ assertTrue(tuples.get(1).getLong("foreground") == 5000);
+ assertTrue(tuples.get(2).get("term").equals("c"));
+ assertTrue(tuples.get(2).getLong("background") == 5900);
+ assertTrue(tuples.get(2).getLong("foreground") == 5000);
- //Test min doc freq percent
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minDocFreq=\".478\", minTermLength=1, maxDocFreq=\".5\")";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
- assert(tuples.size() == 1);
+ //Test min doc freq percent
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=3, minDocFreq=\".478\", minTermLength=1, maxDocFreq=\".5\")";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
- assertTrue(tuples.get(0).get("term").equals("c"));
- assertTrue(tuples.get(0).getLong("background") == 5900);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ assert (tuples.size() == 1);
+ assertTrue(tuples.get(0).get("term").equals("c"));
+ assertTrue(tuples.get(0).getLong("background") == 5900);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- //Test limit
+ //Test limit
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=2, minDocFreq=\"2700\", minTermLength=1, maxDocFreq=\".5\")";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=2, minDocFreq=\"2700\", minTermLength=1, maxDocFreq=\".5\")";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
- assert(tuples.size() == 2);
+ assert (tuples.size() == 2);
- assertTrue(tuples.get(0).get("term").equals("m"));
- assertTrue(tuples.get(0).getLong("background") == 5500);
- assertTrue(tuples.get(0).getLong("foreground") == 5000);
+ assertTrue(tuples.get(0).get("term").equals("m"));
+ assertTrue(tuples.get(0).getLong("background") == 5500);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
- assertTrue(tuples.get(1).get("term").equals("d"));
- assertTrue(tuples.get(1).getLong("background") == 5600);
- assertTrue(tuples.get(1).getLong("foreground") == 5000);
+ assertTrue(tuples.get(1).get("term").equals("d"));
+ assertTrue(tuples.get(1).getLong("background") == 5600);
+ assertTrue(tuples.get(1).getLong("foreground") == 5000);
- //Test term length
+ //Test term length
- significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=2, minDocFreq=\"2700\", minTermLength=2)";
- stream = factory.constructStream(significantTerms);
- tuples = getTuples(stream);
- assert(tuples.size() == 0);
+ significantTerms = "significantTerms(collection1, q=\"id:a*\", field=\"test_t\", limit=2, minDocFreq=\"2700\", minTermLength=2)";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(streamContext);
+ tuples = getTuples(stream);
+ assert (tuples.size() == 0);
- }
+
+ //Test with shards parameter
+ List<String> shardUrls = TupleStream.getShards(cluster.getZkServer().getZkAddress(), COLLECTIONORALIAS, streamContext);
+
+ Map<String, List<String>> shardsMap = new HashMap();
+ shardsMap.put("myCollection", shardUrls);
+ StreamContext context = new StreamContext();
+ context.put("shards", shardsMap);
+ context.setSolrClientCache(cache);
+ significantTerms = "significantTerms(myCollection, q=\"id:a*\", field=\"test_t\", limit=2, minDocFreq=\"2700\", minTermLength=1, maxDocFreq=\".5\")";
+ stream = factory.constructStream(significantTerms);
+ stream.setStreamContext(context);
+ tuples = getTuples(stream);
+
+ assert (tuples.size() == 2);
+
+ assertTrue(tuples.get(0).get("term").equals("m"));
+ assertTrue(tuples.get(0).getLong("background") == 5500);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
+
+ assertTrue(tuples.get(1).get("term").equals("d"));
+ assertTrue(tuples.get(1).getLong("background") == 5600);
+ assertTrue(tuples.get(1).getLong("foreground") == 5000);
+
+ //Exercise the /stream handler
+
+ //Add the shards http parameter for the myCollection
+ StringBuilder buf = new StringBuilder();
+ for (String shardUrl : shardUrls) {
+ if (buf.length() > 0) {
+ buf.append(",");
+ }
+ buf.append(shardUrl);
+ }
+
+ ModifiableSolrParams solrParams = new ModifiableSolrParams();
+ solrParams.add("qt", "/stream");
+ solrParams.add("expr", significantTerms);
+ solrParams.add("myCollection.shards", buf.toString());
+ SolrStream solrStream = new SolrStream(shardUrls.get(0), solrParams);
+ tuples = getTuples(solrStream);
+ assert (tuples.size() == 2);
+
+ assertTrue(tuples.get(0).get("term").equals("m"));
+ assertTrue(tuples.get(0).getLong("background") == 5500);
+ assertTrue(tuples.get(0).getLong("foreground") == 5000);
+
+ assertTrue(tuples.get(1).get("term").equals("d"));
+ assertTrue(tuples.get(1).getLong("background") == 5600);
+ assertTrue(tuples.get(1).getLong("foreground") == 5000);
+
+ //Add a negative test to prove that it cannot find slices if shards parameter is removed
+
+ try {
+ ModifiableSolrParams solrParamsBad = new ModifiableSolrParams();
+ solrParamsBad.add("qt", "/stream");
+ solrParamsBad.add("expr", significantTerms);
+ solrStream = new SolrStream(shardUrls.get(0), solrParamsBad);
+ tuples = getTuples(solrStream);
+ throw new Exception("Exception should have been thrown above");
+ } catch (IOException e) {
+ assertTrue(e.getMessage().contains("Slices not found for myCollection"));
+ }
+ } finally {
+ cache.close();
+ }
+
+ }
+
@Test
public void testComplementStream() throws Exception {
@@ -4920,12 +4998,16 @@ public class StreamExpressionTest extends SolrCloudTestCase {
}
protected List<Tuple> getTuples(TupleStream tupleStream) throws IOException {
- tupleStream.open();
List<Tuple> tuples = new ArrayList<Tuple>();
- for(Tuple t = tupleStream.read(); !t.EOF; t = tupleStream.read()) {
- tuples.add(t);
+
+ try {
+ tupleStream.open();
+ for (Tuple t = tupleStream.read(); !t.EOF; t = tupleStream.read()) {
+ tuples.add(t);
+ }
+ } finally {
+ tupleStream.close();
}
- tupleStream.close();
return tuples;
}
protected boolean assertOrder(List<Tuple> tuples, int... ids) throws Exception {
[09/19] lucene-solr:jira/solr-9835: SOLR-9858: Collect aggregated
metrics from nodes and shard leaders in overseer.
Posted by da...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
new file mode 100644
index 0000000..e9b8c3d
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
@@ -0,0 +1,392 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics.reporters.solr;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Gauge;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricFilter;
+import com.codahale.metrics.ScheduledReporter;
+import com.codahale.metrics.Timer;
+import org.apache.http.client.HttpClient;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.io.SolrClientCache;
+import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.handler.admin.MetricsCollectorHandler;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.util.stats.MetricUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Implementation of {@link ScheduledReporter} that reports metrics from selected registries and sends
+ * them periodically as update requests to a selected Solr collection and to a configured handler.
+ */
+public class SolrReporter extends ScheduledReporter {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ public static final String REGISTRY_ID = "_registry_";
+ public static final String REPORTER_ID = "_reporter_";
+ public static final String GROUP_ID = "_group_";
+ public static final String LABEL_ID = "_label_";
+
+
+ /**
+ * Specification of what registries and what metrics to send.
+ */
+ public static final class Report {
+ public String groupPattern;
+ public String labelPattern;
+ public String registryPattern;
+ public Set<String> metricFilters = new HashSet<>();
+
+ /**
+ * Create a report specification
+ * @param groupPattern logical group for these metrics. This is used in {@link MetricsCollectorHandler}
+ * to select the target registry for metrics to aggregate. Must not be null or empty.
+ * It may contain back-references to capture groups from {@code registryPattern}
+ * @param labelPattern name of this group of metrics. This is used in {@link MetricsCollectorHandler}
+ * to prefix metric names. May be null or empty. It may contain back-references
+ * to capture groups from {@code registryPattern}.
+ * @param registryPattern pattern for selecting matching registries, see {@link SolrMetricManager#registryNames(String...)}
+ * @param metricFilters patterns for selecting matching metrics, see {@link org.apache.solr.metrics.SolrMetricManager.RegexFilter}
+ */
+ public Report(String groupPattern, String labelPattern, String registryPattern, Collection<String> metricFilters) {
+ this.groupPattern = groupPattern;
+ this.labelPattern = labelPattern;
+ this.registryPattern = registryPattern;
+ if (metricFilters != null) {
+ this.metricFilters.addAll(metricFilters);
+ }
+ }
+
+ public static Report fromMap(Map<?, ?> map) {
+ String groupPattern = (String)map.get("group");
+ String labelPattern = (String)map.get("label");
+ String registryPattern = (String)map.get("registry");
+ Object oFilters = map.get("filter");
+ Collection<String> metricFilters = Collections.emptyList();
+ if (oFilters != null) {
+ if (oFilters instanceof String) {
+ metricFilters = Collections.singletonList((String)oFilters);
+ } else if (oFilters instanceof Collection) {
+ metricFilters = (Collection<String>)oFilters;
+ } else {
+ log.warn("Invalid report filters, ignoring: " + oFilters);
+ }
+ }
+ if (groupPattern == null || registryPattern == null) {
+ log.warn("Invalid report configuration, group and registry required!: " + map);
+ return null;
+ }
+ return new Report(groupPattern, labelPattern, registryPattern, metricFilters);
+ }
+ }
+
+ public static class Builder {
+ private final SolrMetricManager metricManager;
+ private final List<Report> reports;
+ private String reporterId;
+ private TimeUnit rateUnit;
+ private TimeUnit durationUnit;
+ private String handler;
+ private boolean skipHistograms;
+ private boolean skipAggregateValues;
+ private boolean cloudClient;
+ private SolrParams params;
+
+ /**
+ * Create a builder for SolrReporter.
+ * @param metricManager metric manager that is the source of metrics
+ * @param reports report definitions
+ * @return builder
+ */
+ public static Builder forReports(SolrMetricManager metricManager, List<Report> reports) {
+ return new Builder(metricManager, reports);
+ }
+
+ private Builder(SolrMetricManager metricManager, List<Report> reports) {
+ this.metricManager = metricManager;
+ this.reports = reports;
+ this.rateUnit = TimeUnit.SECONDS;
+ this.durationUnit = TimeUnit.MILLISECONDS;
+ this.skipHistograms = false;
+ this.skipAggregateValues = false;
+ this.cloudClient = false;
+ this.params = null;
+ }
+
+ /**
+ * Additional {@link SolrParams} to add to every request.
+ * @param params additional params
+ * @return {@code this}
+ */
+ public Builder withSolrParams(SolrParams params) {
+ this.params = params;
+ return this;
+ }
+ /**
+ * If true then use {@link org.apache.solr.client.solrj.impl.CloudSolrClient} for communication.
+ * Default is false.
+ * @param cloudClient use CloudSolrClient when true, {@link org.apache.solr.client.solrj.impl.HttpSolrClient} otherwise.
+ * @return {@code this}
+ */
+ public Builder cloudClient(boolean cloudClient) {
+ this.cloudClient = cloudClient;
+ return this;
+ }
+
+ /**
+ * Histograms are difficult / impossible to aggregate, so it may not be
+ * worth to report them.
+ * @param skipHistograms when true then skip histograms from reports
+ * @return {@code this}
+ */
+ public Builder skipHistograms(boolean skipHistograms) {
+ this.skipHistograms = skipHistograms;
+ return this;
+ }
+
+ /**
+ * Individual values from {@link org.apache.solr.metrics.AggregateMetric} may not be worth to report.
+ * @param skipAggregateValues when tru then skip reporting individual values from the metric
+ * @return {@code this}
+ */
+ public Builder skipAggregateValues(boolean skipAggregateValues) {
+ this.skipAggregateValues = skipAggregateValues;
+ return this;
+ }
+
+ /**
+ * Handler name to use at the remote end.
+ *
+ * @param handler handler name, eg. "/admin/metricsCollector"
+ * @return {@code this}
+ */
+ public Builder withHandler(String handler) {
+ this.handler = handler;
+ return this;
+ }
+
+ /**
+ * Use this id to identify metrics from this instance.
+ *
+ * @param reporterId reporter id
+ * @return {@code this}
+ */
+ public Builder withReporterId(String reporterId) {
+ this.reporterId = reporterId;
+ return this;
+ }
+
+ /**
+ * Convert rates to the given time unit.
+ *
+ * @param rateUnit a unit of time
+ * @return {@code this}
+ */
+ public Builder convertRatesTo(TimeUnit rateUnit) {
+ this.rateUnit = rateUnit;
+ return this;
+ }
+
+ /**
+ * Convert durations to the given time unit.
+ *
+ * @param durationUnit a unit of time
+ * @return {@code this}
+ */
+ public Builder convertDurationsTo(TimeUnit durationUnit) {
+ this.durationUnit = durationUnit;
+ return this;
+ }
+
+ /**
+ * Build it.
+ * @param client an instance of {@link HttpClient} to be used for making calls.
+ * @param urlProvider function that returns the base URL of Solr instance to target. May return
+ * null to indicate that reporting should be skipped. Note: this
+ * function will be called every time just before report is sent.
+ * @return configured instance of reporter
+ */
+ public SolrReporter build(HttpClient client, Supplier<String> urlProvider) {
+ return new SolrReporter(client, urlProvider, metricManager, reports, handler, reporterId, rateUnit, durationUnit,
+ params, skipHistograms, skipAggregateValues, cloudClient);
+ }
+
+ }
+
+ private String reporterId;
+ private String handler;
+ private Supplier<String> urlProvider;
+ private SolrClientCache clientCache;
+ private List<CompiledReport> compiledReports;
+ private SolrMetricManager metricManager;
+ private boolean skipHistograms;
+ private boolean skipAggregateValues;
+ private boolean cloudClient;
+ private ModifiableSolrParams params;
+ private Map<String, Object> metadata;
+
+ private static final class CompiledReport {
+ String group;
+ String label;
+ Pattern registryPattern;
+ MetricFilter filter;
+
+ CompiledReport(Report report) throws PatternSyntaxException {
+ this.group = report.groupPattern;
+ this.label = report.labelPattern;
+ this.registryPattern = Pattern.compile(report.registryPattern);
+ this.filter = new SolrMetricManager.RegexFilter(report.metricFilters);
+ }
+
+ @Override
+ public String toString() {
+ return "CompiledReport{" +
+ "group='" + group + '\'' +
+ ", label='" + label + '\'' +
+ ", registryPattern=" + registryPattern +
+ ", filter=" + filter +
+ '}';
+ }
+ }
+
+ public SolrReporter(HttpClient httpClient, Supplier<String> urlProvider, SolrMetricManager metricManager,
+ List<Report> metrics, String handler,
+ String reporterId, TimeUnit rateUnit, TimeUnit durationUnit,
+ SolrParams params, boolean skipHistograms, boolean skipAggregateValues, boolean cloudClient) {
+ super(null, "solr-reporter", MetricFilter.ALL, rateUnit, durationUnit);
+ this.metricManager = metricManager;
+ this.urlProvider = urlProvider;
+ this.reporterId = reporterId;
+ if (handler == null) {
+ handler = MetricsCollectorHandler.HANDLER_PATH;
+ }
+ this.handler = handler;
+ this.clientCache = new SolrClientCache(httpClient);
+ this.compiledReports = new ArrayList<>();
+ metrics.forEach(report -> {
+ MetricFilter filter = new SolrMetricManager.RegexFilter(report.metricFilters);
+ try {
+ CompiledReport cs = new CompiledReport(report);
+ compiledReports.add(cs);
+ } catch (PatternSyntaxException e) {
+ log.warn("Skipping report with invalid registryPattern: " + report.registryPattern, e);
+ }
+ });
+ this.skipHistograms = skipHistograms;
+ this.skipAggregateValues = skipAggregateValues;
+ this.cloudClient = cloudClient;
+ this.params = new ModifiableSolrParams();
+ this.params.set(REPORTER_ID, reporterId);
+ // allow overrides to take precedence
+ if (params != null) {
+ this.params.add(params);
+ }
+ metadata = new HashMap<>();
+ metadata.put(REPORTER_ID, reporterId);
+ }
+
+ @Override
+ public void close() {
+ clientCache.close();
+ super.close();
+ }
+
+ @Override
+ public void report() {
+ String url = urlProvider.get();
+ // if null then suppress reporting
+ if (url == null) {
+ return;
+ }
+
+ SolrClient solr;
+ if (cloudClient) {
+ solr = clientCache.getCloudSolrClient(url);
+ } else {
+ solr = clientCache.getHttpSolrClient(url);
+ }
+ UpdateRequest req = new UpdateRequest(handler);
+ req.setParams(params);
+ compiledReports.forEach(report -> {
+ Set<String> registryNames = metricManager.registryNames(report.registryPattern);
+ registryNames.forEach(registryName -> {
+ String label = report.label;
+ if (label != null && label.indexOf('$') != -1) {
+ // label with back-references
+ Matcher m = report.registryPattern.matcher(registryName);
+ label = m.replaceFirst(label);
+ }
+ final String effectiveLabel = label;
+ String group = report.group;
+ if (group.indexOf('$') != -1) {
+ // group with back-references
+ Matcher m = report.registryPattern.matcher(registryName);
+ group = m.replaceFirst(group);
+ }
+ final String effectiveGroup = group;
+ MetricUtils.toSolrInputDocuments(metricManager.registry(registryName), Collections.singletonList(report.filter), MetricFilter.ALL,
+ skipHistograms, skipAggregateValues, metadata, doc -> {
+ doc.setField(REGISTRY_ID, registryName);
+ doc.setField(GROUP_ID, effectiveGroup);
+ if (effectiveLabel != null) {
+ doc.setField(LABEL_ID, effectiveLabel);
+ }
+ req.add(doc);
+ });
+ });
+ });
+
+ // if no docs added then don't send a report
+ if (req.getDocuments() == null || req.getDocuments().isEmpty()) {
+ return;
+ }
+ try {
+ //log.info("%%% sending to " + url + ": " + req.getParams());
+ solr.request(req);
+ } catch (Exception e) {
+ log.debug("Error sending metric report", e.toString());
+ }
+
+ }
+
+ @Override
+ public void report(SortedMap<String, Gauge> gauges, SortedMap<String, Counter> counters, SortedMap<String, Histogram> histograms, SortedMap<String, Meter> meters, SortedMap<String, Timer> timers) {
+ // no-op - we do all the work in report()
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
new file mode 100644
index 0000000..2b20274
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
@@ -0,0 +1,188 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics.reporters.solr;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
+
+import org.apache.solr.cloud.CloudDescriptor;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.handler.admin.MetricsCollectorHandler;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricReporter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This class reports selected metrics from replicas to shard leader.
+ * <p>The following configuration properties are supported:</p>
+ * <ul>
+ * <li>handler - (optional str) handler path where reports are sent. Default is
+ * {@link MetricsCollectorHandler#HANDLER_PATH}.</li>
+ * <li>period - (optional int) how often reports are sent, in seconds. Default is 60. Setting this
+ * to 0 disables the reporter.</li>
+ * <li>filter - (optional multiple str) regex expression(s) matching selected metrics to be reported.</li>
+ * </ul>
+ * NOTE: this reporter uses predefined "replica" group, and it's always created even if explicit configuration
+ * is missing. Default configuration uses filters defined in {@link #DEFAULT_FILTERS}.
+ * <p>Example configuration:</p>
+ * <pre>
+ * <reporter name="test" group="replica">
+ * <int name="period">11</int>
+ * <str name="filter">UPDATE\./update/.*requests</str>
+ * <str name="filter">QUERY\./select.*requests</str>
+ * </reporter>
+ * </pre>
+ */
+public class SolrShardReporter extends SolrMetricReporter {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ public static final List<String> DEFAULT_FILTERS = new ArrayList(){{
+ add("TLOG.*");
+ add("REPLICATION.*");
+ add("INDEX.flush.*");
+ add("INDEX.merge.major.*");
+ add("UPDATE\\./update/.*requests");
+ add("QUERY\\./select.*requests");
+ }};
+
+ private String handler = MetricsCollectorHandler.HANDLER_PATH;
+ private int period = SolrMetricManager.DEFAULT_CLOUD_REPORTER_PERIOD;
+ private List<String> filters = new ArrayList<>();
+
+ private SolrReporter reporter;
+
+ /**
+ * Create a reporter for metrics managed in a named registry.
+ *
+ * @param metricManager metric manager
+ * @param registryName registry to use, one of registries managed by
+ * {@link SolrMetricManager}
+ */
+ public SolrShardReporter(SolrMetricManager metricManager, String registryName) {
+ super(metricManager, registryName);
+ }
+
+ public void setHandler(String handler) {
+ this.handler = handler;
+ }
+
+ public void setPeriod(int period) {
+ this.period = period;
+ }
+
+ public void setFilter(List<String> filterConfig) {
+ if (filterConfig == null || filterConfig.isEmpty()) {
+ return;
+ }
+ filters = filterConfig;
+ }
+
+ // for unit tests
+ int getPeriod() {
+ return period;
+ }
+
+ @Override
+ protected void validate() throws IllegalStateException {
+ if (period < 1) {
+ log.info("Turning off shard reporter, period=" + period);
+ }
+ if (filters.isEmpty()) {
+ filters = DEFAULT_FILTERS;
+ }
+ // start in inform(...) only when core is available
+ }
+
+ @Override
+ public void close() throws IOException {
+ if (reporter != null) {
+ reporter.close();
+ }
+ }
+
+ public void setCore(SolrCore core) {
+ if (reporter != null) {
+ reporter.close();
+ }
+ if (core.getCoreDescriptor().getCloudDescriptor() == null) {
+ // not a cloud core
+ log.warn("Not initializing shard reporter for non-cloud core " + core.getName());
+ return;
+ }
+ if (period < 1) { // don't start it
+ log.warn("Not starting shard reporter ");
+ return;
+ }
+ // our id is coreNodeName
+ String id = core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName();
+ // target registry is the leaderRegistryName
+ String groupId = core.getCoreMetricManager().getLeaderRegistryName();
+ if (groupId == null) {
+ log.warn("No leaderRegistryName for core " + core + ", not starting the reporter...");
+ return;
+ }
+ SolrReporter.Report spec = new SolrReporter.Report(groupId, null, registryName, filters);
+ reporter = SolrReporter.Builder.forReports(metricManager, Collections.singletonList(spec))
+ .convertRatesTo(TimeUnit.SECONDS)
+ .convertDurationsTo(TimeUnit.MILLISECONDS)
+ .withHandler(handler)
+ .withReporterId(id)
+ .cloudClient(false) // we want to send reports specifically to a selected leader instance
+ .skipAggregateValues(true) // we don't want to transport details of aggregates
+ .skipHistograms(true) // we don't want to transport histograms
+ .build(core.getCoreDescriptor().getCoreContainer().getUpdateShardHandler().getHttpClient(), new LeaderUrlSupplier(core));
+
+ reporter.start(period, TimeUnit.SECONDS);
+ }
+
+ private static class LeaderUrlSupplier implements Supplier<String> {
+ private SolrCore core;
+
+ LeaderUrlSupplier(SolrCore core) {
+ this.core = core;
+ }
+
+ @Override
+ public String get() {
+ CloudDescriptor cd = core.getCoreDescriptor().getCloudDescriptor();
+ if (cd == null) {
+ return null;
+ }
+ ClusterState state = core.getCoreDescriptor().getCoreContainer().getZkController().getClusterState();
+ DocCollection collection = state.getCollection(core.getCoreDescriptor().getCollectionName());
+ Replica replica = collection.getLeader(core.getCoreDescriptor().getCloudDescriptor().getShardId());
+ if (replica == null) {
+ log.warn("No leader for " + collection.getName() + "/" + core.getCoreDescriptor().getCloudDescriptor().getShardId());
+ return null;
+ }
+ String baseUrl = replica.getStr("base_url");
+ if (baseUrl == null) {
+ log.warn("No base_url for replica " + replica);
+ }
+ return baseUrl;
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/reporters/solr/package-info.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/package-info.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/package-info.java
new file mode 100644
index 0000000..740bcce
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This package contains {@link org.apache.solr.metrics.SolrMetricReporter} implementations
+ * specific to SolrCloud reporting.
+ */
+package org.apache.solr.metrics.reporters.solr;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/update/PeerSync.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/PeerSync.java b/solr/core/src/java/org/apache/solr/update/PeerSync.java
index ac07413..874e39c 100644
--- a/solr/core/src/java/org/apache/solr/update/PeerSync.java
+++ b/solr/core/src/java/org/apache/solr/update/PeerSync.java
@@ -161,11 +161,13 @@ public class PeerSync implements SolrMetricProducer {
core.getCoreMetricManager().registerMetricProducer(SolrInfoMBean.Category.REPLICATION.toString(), this);
}
+ public static final String METRIC_SCOPE = "peerSync";
+
@Override
public void initializeMetrics(SolrMetricManager manager, String registry, String scope) {
- syncTime = manager.timer(registry, "time", scope);
- syncErrors = manager.counter(registry, "errors", scope);
- syncSkipped = manager.counter(registry, "skipped", scope);
+ syncTime = manager.timer(registry, "time", scope, METRIC_SCOPE);
+ syncErrors = manager.counter(registry, "errors", scope, METRIC_SCOPE);
+ syncSkipped = manager.counter(registry, "skipped", scope, METRIC_SCOPE);
}
/** optional list of updates we had before possibly receiving new updates */
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
index 80f035b..5a7c680 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
@@ -16,11 +16,15 @@
*/
package org.apache.solr.util.stats;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
+import java.util.function.BiConsumer;
+import java.util.function.Consumer;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
@@ -32,13 +36,40 @@ import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
+import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.util.NamedList;
+import org.apache.solr.metrics.AggregateMetric;
/**
* Metrics specific utility functions.
*/
public class MetricUtils {
+ public static final String METRIC_NAME = "metric";
+ public static final String VALUES = "values";
+
+ static final String MS = "_ms";
+
+ static final String MIN = "min";
+ static final String MIN_MS = MIN + MS;
+ static final String MAX = "max";
+ static final String MAX_MS = MAX + MS;
+ static final String MEAN = "mean";
+ static final String MEAN_MS = MEAN + MS;
+ static final String MEDIAN = "median";
+ static final String MEDIAN_MS = MEDIAN + MS;
+ static final String STDDEV = "stddev";
+ static final String STDDEV_MS = STDDEV + MS;
+ static final String SUM = "sum";
+ static final String P75 = "p75";
+ static final String P75_MS = P75 + MS;
+ static final String P95 = "p95";
+ static final String P95_MS = P95 + MS;
+ static final String P99 = "p99";
+ static final String P99_MS = P99 + MS;
+ static final String P999 = "p999";
+ static final String P999_MS = P999 + MS;
+
/**
* Adds metrics from a Timer to a NamedList, using well-known back-compat names.
* @param lst The NamedList to add the metrics data to
@@ -77,41 +108,138 @@ public class MetricUtils {
* included in the output
* @param mustMatchFilter a {@link MetricFilter}.
* A metric <em>must</em> match this filter to be included in the output.
+ * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
+ * @param metadata optional metadata. If not null and not empty then this map will be added under a
+ * {@code _metadata_} key.
* @return a {@link NamedList}
*/
- public static NamedList toNamedList(MetricRegistry registry, List<MetricFilter> shouldMatchFilters, MetricFilter mustMatchFilter) {
- NamedList response = new NamedList();
+ public static NamedList toNamedList(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
+ MetricFilter mustMatchFilter, boolean skipHistograms,
+ boolean skipAggregateValues,
+ Map<String, Object> metadata) {
+ NamedList result = new NamedList();
+ toNamedMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, (k, v) -> {
+ result.add(k, new NamedList(v));
+ });
+ if (metadata != null && !metadata.isEmpty()) {
+ result.add("_metadata_", new NamedList(metadata));
+ }
+ return result;
+ }
+
+ /**
+ * Returns a representation of the given metric registry as a list of {@link SolrInputDocument}-s.
+ * Only those metrics
+ * are converted to {@link SolrInputDocument}-s which match at least one of the given MetricFilter instances.
+ *
+ * @param registry the {@link MetricRegistry} to be converted to a list of {@link SolrInputDocument}-s
+ * @param shouldMatchFilters a list of {@link MetricFilter} instances.
+ * A metric must match <em>any one</em> of the filters from this list to be
+ * included in the output
+ * @param mustMatchFilter a {@link MetricFilter}.
+ * A metric <em>must</em> match this filter to be included in the output.
+ * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
+ * @param metadata optional metadata. If not null and not empty then this map will be added under a
+ * {@code _metadata_} key.
+ * @return a list of {@link SolrInputDocument}-s
+ */
+ public static List<SolrInputDocument> toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
+ MetricFilter mustMatchFilter, boolean skipHistograms,
+ boolean skipAggregateValues,
+ Map<String, Object> metadata) {
+ List<SolrInputDocument> result = new LinkedList<>();
+ toSolrInputDocuments(registry, shouldMatchFilters, mustMatchFilter, skipHistograms,
+ skipAggregateValues, metadata, doc -> {
+ result.add(doc);
+ });
+ return result;
+ }
+
+ public static void toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
+ MetricFilter mustMatchFilter, boolean skipHistograms,
+ boolean skipAggregateValues,
+ Map<String, Object> metadata, Consumer<SolrInputDocument> consumer) {
+ boolean addMetadata = metadata != null && !metadata.isEmpty();
+ toNamedMaps(registry, shouldMatchFilters, mustMatchFilter, skipHistograms, skipAggregateValues, (k, v) -> {
+ SolrInputDocument doc = new SolrInputDocument();
+ doc.setField(METRIC_NAME, k);
+ toSolrInputDocument(null, doc, v);
+ if (addMetadata) {
+ toSolrInputDocument(null, doc, metadata);
+ }
+ consumer.accept(doc);
+ });
+ }
+
+ public static void toSolrInputDocument(String prefix, SolrInputDocument doc, Map<String, Object> map) {
+ for (Map.Entry<String, Object> entry : map.entrySet()) {
+ if (entry.getValue() instanceof Map) { // flatten recursively
+ toSolrInputDocument(entry.getKey(), doc, (Map<String, Object>)entry.getValue());
+ } else {
+ String key = prefix != null ? prefix + "." + entry.getKey() : entry.getKey();
+ doc.addField(key, entry.getValue());
+ }
+ }
+ }
+
+ public static void toNamedMaps(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
+ MetricFilter mustMatchFilter, boolean skipHistograms, boolean skipAggregateValues,
+ BiConsumer<String, Map<String, Object>> consumer) {
Map<String, Metric> metrics = registry.getMetrics();
SortedSet<String> names = registry.getNames();
names.stream()
.filter(s -> shouldMatchFilters.stream().anyMatch(metricFilter -> metricFilter.matches(s, metrics.get(s))))
.filter(s -> mustMatchFilter.matches(s, metrics.get(s)))
.forEach(n -> {
- Metric metric = metrics.get(n);
- if (metric instanceof Counter) {
- Counter counter = (Counter) metric;
- response.add(n, counterToNamedList(counter));
- } else if (metric instanceof Gauge) {
- Gauge gauge = (Gauge) metric;
- response.add(n, gaugeToNamedList(gauge));
- } else if (metric instanceof Meter) {
- Meter meter = (Meter) metric;
- response.add(n, meterToNamedList(meter));
- } else if (metric instanceof Timer) {
- Timer timer = (Timer) metric;
- response.add(n, timerToNamedList(timer));
- } else if (metric instanceof Histogram) {
- Histogram histogram = (Histogram) metric;
- response.add(n, histogramToNamedList(histogram));
- }
- });
+ Metric metric = metrics.get(n);
+ if (metric instanceof Counter) {
+ Counter counter = (Counter) metric;
+ consumer.accept(n, counterToMap(counter));
+ } else if (metric instanceof Gauge) {
+ Gauge gauge = (Gauge) metric;
+ consumer.accept(n, gaugeToMap(gauge));
+ } else if (metric instanceof Meter) {
+ Meter meter = (Meter) metric;
+ consumer.accept(n, meterToMap(meter));
+ } else if (metric instanceof Timer) {
+ Timer timer = (Timer) metric;
+ consumer.accept(n, timerToMap(timer, skipHistograms));
+ } else if (metric instanceof Histogram) {
+ if (!skipHistograms) {
+ Histogram histogram = (Histogram) metric;
+ consumer.accept(n, histogramToMap(histogram));
+ }
+ } else if (metric instanceof AggregateMetric) {
+ consumer.accept(n, aggregateMetricToMap((AggregateMetric)metric, skipAggregateValues));
+ }
+ });
+ }
+
+ static Map<String, Object> aggregateMetricToMap(AggregateMetric metric, boolean skipAggregateValues) {
+ Map<String, Object> response = new LinkedHashMap<>();
+ response.put("count", metric.size());
+ response.put(MAX, metric.getMax());
+ response.put(MIN, metric.getMin());
+ response.put(MEAN, metric.getMean());
+ response.put(STDDEV, metric.getStdDev());
+ response.put(SUM, metric.getSum());
+ if (!(metric.isEmpty() || skipAggregateValues)) {
+ Map<String, Object> values = new LinkedHashMap<>();
+ response.put(VALUES, values);
+ metric.getValues().forEach((k, v) -> {
+ Map<String, Object> map = new LinkedHashMap<>();
+ map.put("value", v.value);
+ map.put("updateCount", v.updateCount.get());
+ values.put(k, map);
+ });
+ }
return response;
}
- static NamedList histogramToNamedList(Histogram histogram) {
- NamedList response = new NamedList();
+ static Map<String, Object> histogramToMap(Histogram histogram) {
+ Map<String, Object> response = new LinkedHashMap<>();
Snapshot snapshot = histogram.getSnapshot();
- response.add("count", histogram.getCount());
+ response.put("count", histogram.getCount());
// non-time based values
addSnapshot(response, snapshot, false);
return response;
@@ -126,71 +254,52 @@ public class MetricUtils {
}
}
- static final String MS = "_ms";
-
- static final String MIN = "min";
- static final String MIN_MS = MIN + MS;
- static final String MAX = "max";
- static final String MAX_MS = MAX + MS;
- static final String MEAN = "mean";
- static final String MEAN_MS = MEAN + MS;
- static final String MEDIAN = "median";
- static final String MEDIAN_MS = MEDIAN + MS;
- static final String STDDEV = "stddev";
- static final String STDDEV_MS = STDDEV + MS;
- static final String P75 = "p75";
- static final String P75_MS = P75 + MS;
- static final String P95 = "p95";
- static final String P95_MS = P95 + MS;
- static final String P99 = "p99";
- static final String P99_MS = P99 + MS;
- static final String P999 = "p999";
- static final String P999_MS = P999 + MS;
-
// some snapshots represent time in ns, other snapshots represent raw values (eg. chunk size)
- static void addSnapshot(NamedList response, Snapshot snapshot, boolean ms) {
- response.add((ms ? MIN_MS: MIN), nsToMs(ms, snapshot.getMin()));
- response.add((ms ? MAX_MS: MAX), nsToMs(ms, snapshot.getMax()));
- response.add((ms ? MEAN_MS : MEAN), nsToMs(ms, snapshot.getMean()));
- response.add((ms ? MEDIAN_MS: MEDIAN), nsToMs(ms, snapshot.getMedian()));
- response.add((ms ? STDDEV_MS: STDDEV), nsToMs(ms, snapshot.getStdDev()));
- response.add((ms ? P75_MS: P75), nsToMs(ms, snapshot.get75thPercentile()));
- response.add((ms ? P95_MS: P95), nsToMs(ms, snapshot.get95thPercentile()));
- response.add((ms ? P99_MS: P99), nsToMs(ms, snapshot.get99thPercentile()));
- response.add((ms ? P999_MS: P999), nsToMs(ms, snapshot.get999thPercentile()));
- }
-
- static NamedList timerToNamedList(Timer timer) {
- NamedList response = new NamedList();
- response.add("count", timer.getCount());
- response.add("meanRate", timer.getMeanRate());
- response.add("1minRate", timer.getOneMinuteRate());
- response.add("5minRate", timer.getFiveMinuteRate());
- response.add("15minRate", timer.getFifteenMinuteRate());
- // time-based values in nanoseconds
- addSnapshot(response, timer.getSnapshot(), true);
+ static void addSnapshot(Map<String, Object> response, Snapshot snapshot, boolean ms) {
+ response.put((ms ? MIN_MS: MIN), nsToMs(ms, snapshot.getMin()));
+ response.put((ms ? MAX_MS: MAX), nsToMs(ms, snapshot.getMax()));
+ response.put((ms ? MEAN_MS : MEAN), nsToMs(ms, snapshot.getMean()));
+ response.put((ms ? MEDIAN_MS: MEDIAN), nsToMs(ms, snapshot.getMedian()));
+ response.put((ms ? STDDEV_MS: STDDEV), nsToMs(ms, snapshot.getStdDev()));
+ response.put((ms ? P75_MS: P75), nsToMs(ms, snapshot.get75thPercentile()));
+ response.put((ms ? P95_MS: P95), nsToMs(ms, snapshot.get95thPercentile()));
+ response.put((ms ? P99_MS: P99), nsToMs(ms, snapshot.get99thPercentile()));
+ response.put((ms ? P999_MS: P999), nsToMs(ms, snapshot.get999thPercentile()));
+ }
+
+ static Map<String,Object> timerToMap(Timer timer, boolean skipHistograms) {
+ Map<String, Object> response = new LinkedHashMap<>();
+ response.put("count", timer.getCount());
+ response.put("meanRate", timer.getMeanRate());
+ response.put("1minRate", timer.getOneMinuteRate());
+ response.put("5minRate", timer.getFiveMinuteRate());
+ response.put("15minRate", timer.getFifteenMinuteRate());
+ if (!skipHistograms) {
+ // time-based values in nanoseconds
+ addSnapshot(response, timer.getSnapshot(), true);
+ }
return response;
}
- static NamedList meterToNamedList(Meter meter) {
- NamedList response = new NamedList();
- response.add("count", meter.getCount());
- response.add("meanRate", meter.getMeanRate());
- response.add("1minRate", meter.getOneMinuteRate());
- response.add("5minRate", meter.getFiveMinuteRate());
- response.add("15minRate", meter.getFifteenMinuteRate());
+ static Map<String, Object> meterToMap(Meter meter) {
+ Map<String, Object> response = new LinkedHashMap<>();
+ response.put("count", meter.getCount());
+ response.put("meanRate", meter.getMeanRate());
+ response.put("1minRate", meter.getOneMinuteRate());
+ response.put("5minRate", meter.getFiveMinuteRate());
+ response.put("15minRate", meter.getFifteenMinuteRate());
return response;
}
- static NamedList gaugeToNamedList(Gauge gauge) {
- NamedList response = new NamedList();
- response.add("value", gauge.getValue());
+ static Map<String, Object> gaugeToMap(Gauge gauge) {
+ Map<String, Object> response = new LinkedHashMap<>();
+ response.put("value", gauge.getValue());
return response;
}
- static NamedList counterToNamedList(Counter counter) {
- NamedList response = new NamedList();
- response.add("count", counter.getCount());
+ static Map<String, Object> counterToMap(Counter counter) {
+ Map<String, Object> response = new LinkedHashMap<>();
+ response.put("count", counter.getCount());
return response;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test-files/solr/solr-solrreporter.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/solr-solrreporter.xml b/solr/core/src/test-files/solr/solr-solrreporter.xml
new file mode 100644
index 0000000..db03e42
--- /dev/null
+++ b/solr/core/src/test-files/solr/solr-solrreporter.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<solr>
+ <shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
+ <str name="urlScheme">${urlScheme:}</str>
+ <int name="socketTimeout">${socketTimeout:90000}</int>
+ <int name="connTimeout">${connTimeout:15000}</int>
+ </shardHandlerFactory>
+
+ <solrcloud>
+ <str name="host">127.0.0.1</str>
+ <int name="hostPort">${hostPort:8983}</int>
+ <str name="hostContext">${hostContext:solr}</str>
+ <int name="zkClientTimeout">${solr.zkclienttimeout:30000}</int>
+ <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
+ <int name="leaderVoteWait">${leaderVoteWait:10000}</int>
+ <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:45000}</int>
+ <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:340000}</int>
+ <int name="autoReplicaFailoverWaitAfterExpiration">${autoReplicaFailoverWaitAfterExpiration:10000}</int>
+ <int name="autoReplicaFailoverWorkLoopDelay">${autoReplicaFailoverWorkLoopDelay:10000}</int>
+ <int name="autoReplicaFailoverBadNodeExpiration">${autoReplicaFailoverBadNodeExpiration:60000}</int>
+ </solrcloud>
+
+ <metrics>
+ <reporter name="test" group="shard">
+ <int name="period">5</int>
+ <str name="filter">UPDATE\./update/.*requests</str>
+ <str name="filter">QUERY\./select.*requests</str>
+ </reporter>
+ <reporter name="test" group="cluster">
+ <str name="handler">/admin/metrics/collector</str>
+ <int name="period">5</int>
+ <lst name="report">
+ <str name="group">cluster</str>
+ <str name="label">jvm</str>
+ <str name="registry">solr\.jvm</str>
+ <str name="filter">memory\.total\..*</str>
+ <str name="filter">memory\.heap\..*</str>
+ <str name="filter">os\.SystemLoadAverage</str>
+ <str name="filter">threads\.count</str>
+ </lst>
+ <lst name="report">
+ <str name="group">cluster</str>
+ <str name="label">leader.$1</str>
+ <str name="registry">solr\.collection\.(.*)\.leader</str>
+ <str name="filter">UPDATE\./update/.*</str>
+ </lst>
+ </reporter>
+ </metrics>
+</solr>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
index 164eeab..1af09f4 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudRecovery.java
@@ -119,9 +119,9 @@ public class TestCloudRecovery extends SolrCloudTestCase {
.filter(s -> s.startsWith("solr.core.")).collect(Collectors.toList());
for (String registry : registryNames) {
Map<String, Metric> metrics = manager.registry(registry).getMetrics();
- Timer timer = (Timer)metrics.get("REPLICATION.time");
- Counter counter = (Counter)metrics.get("REPLICATION.errors");
- Counter skipped = (Counter)metrics.get("REPLICATION.skipped");
+ Timer timer = (Timer)metrics.get("REPLICATION.peerSync.time");
+ Counter counter = (Counter)metrics.get("REPLICATION.peerSync.errors");
+ Counter skipped = (Counter)metrics.get("REPLICATION.peerSync.skipped");
replicationCount += timer.getCount();
errorsCount += counter.getCount();
skippedCount += skipped.getCount();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java b/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java
index 2cad6e8..aa107bc 100644
--- a/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java
+++ b/solr/core/src/test/org/apache/solr/core/TestJmxMonitoredMap.java
@@ -85,7 +85,7 @@ public class TestJmxMonitoredMap extends LuceneTestCase {
log.info("Using port: " + port);
String url = "service:jmx:rmi:///jndi/rmi://127.0.0.1:"+port+"/solrjmx";
JmxConfiguration config = new JmxConfiguration(true, null, url, null);
- monitoredMap = new JmxMonitoredMap<>("", "", config);
+ monitoredMap = new JmxMonitoredMap<>("", "", "", config);
JMXServiceURL u = new JMXServiceURL(url);
connector = JMXConnectorFactory.connect(u);
mbeanServer = connector.getMBeanServerConnection();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
index 1df6021..6e8e1e5 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrCoreMetricManagerTest.java
@@ -103,6 +103,7 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
String className = MockMetricReporter.class.getName();
String reporterName = TestUtil.randomUnicodeString(random);
+ String taggedName = reporterName + "@" + coreMetricManager.getTag();
Map<String, Object> attrs = new HashMap<>();
attrs.put(FieldType.CLASS_NAME, className);
@@ -116,15 +117,16 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
PluginInfo pluginInfo = shouldDefinePlugin ? new PluginInfo(TestUtil.randomUnicodeString(random), attrs) : null;
try {
- metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(), pluginInfo);
+ metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
+ pluginInfo, String.valueOf(coreMetricManager.getCore().hashCode()));
assertNotNull(pluginInfo);
Map<String, SolrMetricReporter> reporters = metricManager.getReporters(coreMetricManager.getRegistryName());
assertTrue("reporters.size should be > 0, but was + " + reporters.size(), reporters.size() > 0);
- assertNotNull("reporter " + reporterName + " not present among " + reporters, reporters.get(reporterName));
- assertTrue("wrong reporter class: " + reporters.get(reporterName), reporters.get(reporterName) instanceof MockMetricReporter);
+ assertNotNull("reporter " + reporterName + " not present among " + reporters, reporters.get(taggedName));
+ assertTrue("wrong reporter class: " + reporters.get(taggedName), reporters.get(taggedName) instanceof MockMetricReporter);
} catch (IllegalArgumentException e) {
assertTrue(pluginInfo == null || attrs.get("configurable") == null);
- assertNull(metricManager.getReporters(coreMetricManager.getRegistryName()).get(reporterName));
+ assertNull(metricManager.getReporters(coreMetricManager.getRegistryName()).get(taggedName));
}
}
@@ -152,20 +154,11 @@ public class SolrCoreMetricManagerTest extends SolrTestCaseJ4 {
}
@Test
- public void testRegistryName() throws Exception {
- String collectionName = "my_collection_";
- String cloudCoreName = "my_collection__shard1_0_replica0";
- String simpleCoreName = "collection_1_replica0";
- String simpleRegistryName = "solr.core." + simpleCoreName;
- String cloudRegistryName = "solr.core." + cloudCoreName;
- String nestedRegistryName = "solr.core.my_collection_.shard1_0.replica0";
- // pass through
- assertEquals(cloudRegistryName, coreMetricManager.createRegistryName(null, cloudCoreName));
- assertEquals(simpleRegistryName, coreMetricManager.createRegistryName(null, simpleCoreName));
- // unknown naming scheme -> pass through
- assertEquals(simpleRegistryName, coreMetricManager.createRegistryName(collectionName, simpleCoreName));
- // cloud collection
- assertEquals(nestedRegistryName, coreMetricManager.createRegistryName(collectionName, cloudCoreName));
-
+ public void testNonCloudRegistryName() throws Exception {
+ String registryName = h.getCore().getCoreMetricManager().getRegistryName();
+ String leaderRegistryName = h.getCore().getCoreMetricManager().getLeaderRegistryName();
+ assertNotNull(registryName);
+ assertEquals("solr.core.collection1", registryName);
+ assertNull(leaderRegistryName);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
index ee2acd3..1c29c5e 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
@@ -205,32 +205,32 @@ public class SolrMetricManagerTest extends SolrTestCaseJ4 {
createPluginInfo("node_foo", "node", null),
createPluginInfo("core_foo", "core", null)
};
-
- metricManager.loadReporters(plugins, loader, SolrInfoMBean.Group.node);
+ String tag = "xyz";
+ metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.node);
Map<String, SolrMetricReporter> reporters = metricManager.getReporters(
SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
assertEquals(4, reporters.size());
- assertTrue(reporters.containsKey("universal_foo"));
- assertTrue(reporters.containsKey("multigroup_foo"));
- assertTrue(reporters.containsKey("node_foo"));
- assertTrue(reporters.containsKey("multiregistry_foo"));
+ assertTrue(reporters.containsKey("universal_foo@" + tag));
+ assertTrue(reporters.containsKey("multigroup_foo@" + tag));
+ assertTrue(reporters.containsKey("node_foo@" + tag));
+ assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
- metricManager.loadReporters(plugins, loader, SolrInfoMBean.Group.core, "collection1");
+ metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.core, "collection1");
reporters = metricManager.getReporters(
SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, "collection1"));
assertEquals(5, reporters.size());
- assertTrue(reporters.containsKey("universal_foo"));
- assertTrue(reporters.containsKey("multigroup_foo"));
- assertTrue(reporters.containsKey("specific_foo"));
- assertTrue(reporters.containsKey("core_foo"));
- assertTrue(reporters.containsKey("multiregistry_foo"));
+ assertTrue(reporters.containsKey("universal_foo@" + tag));
+ assertTrue(reporters.containsKey("multigroup_foo@" + tag));
+ assertTrue(reporters.containsKey("specific_foo@" + tag));
+ assertTrue(reporters.containsKey("core_foo@" + tag));
+ assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
- metricManager.loadReporters(plugins, loader, SolrInfoMBean.Group.jvm);
+ metricManager.loadReporters(plugins, loader, tag, SolrInfoMBean.Group.jvm);
reporters = metricManager.getReporters(
SolrMetricManager.getRegistryName(SolrInfoMBean.Group.jvm));
assertEquals(2, reporters.size());
- assertTrue(reporters.containsKey("universal_foo"));
- assertTrue(reporters.containsKey("multigroup_foo"));
+ assertTrue(reporters.containsKey("universal_foo@" + tag));
+ assertTrue(reporters.containsKey("multigroup_foo@" + tag));
metricManager.removeRegistry("solr.jvm");
reporters = metricManager.getReporters(
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
index 27c038b..dfb5a0f 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java
@@ -19,7 +19,6 @@ package org.apache.solr.metrics;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.util.Arrays;
import java.util.Map;
import java.util.Random;
@@ -55,6 +54,11 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
private CoreContainer cc;
private SolrMetricManager metricManager;
+ private String tag;
+
+ private void assertTagged(Map<String, SolrMetricReporter> reporters, String name) {
+ assertTrue("Reporter '" + name + "' missing in " + reporters, reporters.containsKey(name + "@" + tag));
+ }
@Before
public void beforeTest() throws Exception {
@@ -68,10 +72,13 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
new TestHarness.TestCoresLocator(DEFAULT_TEST_CORENAME, initCoreDataDir.getAbsolutePath(), "solrconfig.xml", "schema.xml"));
h.coreName = DEFAULT_TEST_CORENAME;
metricManager = cc.getMetricManager();
+ tag = h.getCore().getCoreMetricManager().getTag();
// initially there are more reporters, because two of them are added via a matching collection name
Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.core." + DEFAULT_TEST_CORENAME);
assertEquals(INITIAL_REPORTERS.length, reporters.size());
- assertTrue(reporters.keySet().containsAll(Arrays.asList(INITIAL_REPORTERS)));
+ for (String r : INITIAL_REPORTERS) {
+ assertTagged(reporters, r);
+ }
// test rename operation
cc.rename(DEFAULT_TEST_CORENAME, CORE_NAME);
h.coreName = CORE_NAME;
@@ -101,7 +108,7 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
deleteCore();
for (String reporterName : RENAMED_REPORTERS) {
- SolrMetricReporter reporter = reporters.get(reporterName);
+ SolrMetricReporter reporter = reporters.get(reporterName + "@" + tag);
MockMetricReporter mockReporter = (MockMetricReporter) reporter;
assertTrue("Reporter " + reporterName + " was not closed: " + mockReporter, mockReporter.didClose);
}
@@ -130,7 +137,7 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 {
// SPECIFIC and MULTIREGISTRY were skipped because they were
// specific to collection1
for (String reporterName : RENAMED_REPORTERS) {
- SolrMetricReporter reporter = reporters.get(reporterName);
+ SolrMetricReporter reporter = reporters.get(reporterName + "@" + tag);
assertNotNull("Reporter " + reporterName + " was not found.", reporter);
assertTrue(reporter instanceof MockMetricReporter);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
index ea452b2..82b9d58 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/SolrJmxReporterTest.java
@@ -64,15 +64,17 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
coreMetricManager = core.getCoreMetricManager();
metricManager = core.getCoreDescriptor().getCoreContainer().getMetricManager();
PluginInfo pluginInfo = createReporterPluginInfo();
- metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(), pluginInfo);
+ metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
+ pluginInfo, coreMetricManager.getTag());
Map<String, SolrMetricReporter> reporters = metricManager.getReporters(coreMetricManager.getRegistryName());
assertTrue("reporters.size should be > 0, but was + " + reporters.size(), reporters.size() > 0);
reporterName = pluginInfo.name;
- assertNotNull("reporter " + reporterName + " not present among " + reporters, reporters.get(reporterName));
- assertTrue("wrong reporter class: " + reporters.get(reporterName), reporters.get(reporterName) instanceof SolrJmxReporter);
+ String taggedName = reporterName + "@" + coreMetricManager.getTag();
+ assertNotNull("reporter " + taggedName + " not present among " + reporters, reporters.get(taggedName));
+ assertTrue("wrong reporter class: " + reporters.get(taggedName), reporters.get(taggedName) instanceof SolrJmxReporter);
- reporter = (SolrJmxReporter) reporters.get(reporterName);
+ reporter = (SolrJmxReporter) reporters.get(taggedName);
mBeanServer = reporter.getMBeanServer();
assertNotNull("MBean server not found.", mBeanServer);
}
@@ -144,7 +146,8 @@ public class SolrJmxReporterTest extends SolrTestCaseJ4 {
h.getCoreContainer().reload(h.getCore().getName());
PluginInfo pluginInfo = createReporterPluginInfo();
- metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(), pluginInfo);
+ metricManager.loadReporter(coreMetricManager.getRegistryName(), coreMetricManager.getCore().getResourceLoader(),
+ pluginInfo, String.valueOf(coreMetricManager.getCore().hashCode()));
coreMetricManager.registerMetricProducer(scope, producer);
objects = mBeanServer.queryMBeans(null, null);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
new file mode 100644
index 0000000..91952b8
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics.reporters.solr;
+
+import java.nio.file.Paths;
+import java.util.Map;
+
+import com.codahale.metrics.Metric;
+import org.apache.commons.io.IOUtils;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.metrics.AggregateMetric;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricReporter;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ */
+public class SolrCloudReportersTest extends SolrCloudTestCase {
+ int leaderRegistries;
+ int clusterRegistries;
+
+
+ @BeforeClass
+ public static void configureDummyCluster() throws Exception {
+ configureCluster(0).configure();
+ }
+
+ @Before
+ public void closePreviousCluster() throws Exception {
+ shutdownCluster();
+ leaderRegistries = 0;
+ clusterRegistries = 0;
+ }
+
+ @Test
+ public void testExplicitConfiguration() throws Exception {
+ String solrXml = IOUtils.toString(SolrCloudReportersTest.class.getResourceAsStream("/solr/solr-solrreporter.xml"), "UTF-8");
+ configureCluster(2)
+ .withSolrXml(solrXml).configure();
+ cluster.uploadConfigSet(Paths.get(TEST_PATH().toString(), "configsets", "minimal", "conf"), "test");
+ System.out.println("ZK: " + cluster.getZkServer().getZkAddress());
+ CollectionAdminRequest.createCollection("test_collection", "test", 2, 2)
+ .setMaxShardsPerNode(4)
+ .process(cluster.getSolrClient());
+ waitForState("Expected test_collection with 2 shards and 2 replicas", "test_collection", clusterShape(2, 2));
+ Thread.sleep(15000);
+ cluster.getJettySolrRunners().forEach(jetty -> {
+ CoreContainer cc = jetty.getCoreContainer();
+ // verify registry names
+ for (String name : cc.getCoreNames()) {
+ SolrCore core = cc.getCore(name);
+ try {
+ String registryName = core.getCoreMetricManager().getRegistryName();
+ String leaderRegistryName = core.getCoreMetricManager().getLeaderRegistryName();
+ String coreName = core.getName();
+ String collectionName = core.getCoreDescriptor().getCollectionName();
+ String coreNodeName = core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName();
+ String replicaName = coreName.split("_")[3];
+ String shardId = core.getCoreDescriptor().getCloudDescriptor().getShardId();
+
+ assertEquals("solr.core." + collectionName + "." + shardId + "." + replicaName, registryName);
+ assertEquals("solr.collection." + collectionName + "." + shardId + ".leader", leaderRegistryName);
+
+ } finally {
+ if (core != null) {
+ core.close();
+ }
+ }
+ }
+ SolrMetricManager metricManager = cc.getMetricManager();
+ Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.cluster");
+ assertEquals(reporters.toString(), 1, reporters.size());
+ SolrMetricReporter reporter = reporters.get("test");
+ assertNotNull(reporter);
+ assertTrue(reporter.toString(), reporter instanceof SolrClusterReporter);
+ SolrClusterReporter sor = (SolrClusterReporter)reporter;
+ assertEquals(5, sor.getPeriod());
+ for (String registryName : metricManager.registryNames(".*\\.shard[0-9]\\.replica.*")) {
+ reporters = metricManager.getReporters(registryName);
+ assertEquals(reporters.toString(), 1, reporters.size());
+ reporter = null;
+ for (String name : reporters.keySet()) {
+ if (name.startsWith("test")) {
+ reporter = reporters.get(name);
+ }
+ }
+ assertNotNull(reporter);
+ assertTrue(reporter.toString(), reporter instanceof SolrShardReporter);
+ SolrShardReporter srr = (SolrShardReporter)reporter;
+ assertEquals(5, srr.getPeriod());
+ }
+ for (String registryName : metricManager.registryNames(".*\\.leader")) {
+ leaderRegistries++;
+ reporters = metricManager.getReporters(registryName);
+ // no reporters registered for leader registry
+ assertEquals(reporters.toString(), 0, reporters.size());
+ // verify specific metrics
+ Map<String, Metric> metrics = metricManager.registry(registryName).getMetrics();
+ String key = "QUERY./select.requests.count";
+ assertTrue(key, metrics.containsKey(key));
+ assertTrue(key, metrics.get(key) instanceof AggregateMetric);
+ key = "UPDATE./update/json.requests.count";
+ assertTrue(key, metrics.containsKey(key));
+ assertTrue(key, metrics.get(key) instanceof AggregateMetric);
+ }
+ if (metricManager.registryNames().contains("solr.cluster")) {
+ clusterRegistries++;
+ Map<String,Metric> metrics = metricManager.registry("solr.cluster").getMetrics();
+ String key = "jvm.memory.heap.init.value";
+ assertTrue(key, metrics.containsKey(key));
+ assertTrue(key, metrics.get(key) instanceof AggregateMetric);
+ key = "leader.test_collection.shard1.UPDATE./update/json.requests.count.max";
+ assertTrue(key, metrics.containsKey(key));
+ assertTrue(key, metrics.get(key) instanceof AggregateMetric);
+ }
+ });
+ assertEquals("leaderRegistries", 2, leaderRegistries);
+ assertEquals("clusterRegistries", 1, clusterRegistries);
+ }
+
+ @Test
+ public void testDefaultPlugins() throws Exception {
+ String solrXml = IOUtils.toString(SolrCloudReportersTest.class.getResourceAsStream("/solr/solr.xml"), "UTF-8");
+ configureCluster(2)
+ .withSolrXml(solrXml).configure();
+ cluster.uploadConfigSet(Paths.get(TEST_PATH().toString(), "configsets", "minimal", "conf"), "test");
+ System.out.println("ZK: " + cluster.getZkServer().getZkAddress());
+ CollectionAdminRequest.createCollection("test_collection", "test", 2, 2)
+ .setMaxShardsPerNode(4)
+ .process(cluster.getSolrClient());
+ waitForState("Expected test_collection with 2 shards and 2 replicas", "test_collection", clusterShape(2, 2));
+ cluster.getJettySolrRunners().forEach(jetty -> {
+ CoreContainer cc = jetty.getCoreContainer();
+ SolrMetricManager metricManager = cc.getMetricManager();
+ Map<String, SolrMetricReporter> reporters = metricManager.getReporters("solr.cluster");
+ assertEquals(reporters.toString(), 0, reporters.size());
+ for (String registryName : metricManager.registryNames(".*\\.shard[0-9]\\.replica.*")) {
+ reporters = metricManager.getReporters(registryName);
+ assertEquals(reporters.toString(), 0, reporters.size());
+ }
+ });
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java
new file mode 100644
index 0000000..9ce3762
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrShardReporterTest.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics.reporters.solr;
+
+import java.lang.invoke.MethodHandles;
+import java.util.Map;
+
+import com.codahale.metrics.Metric;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
+import org.apache.solr.cloud.CloudDescriptor;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.CoreDescriptor;
+import org.apache.solr.metrics.AggregateMetric;
+import org.apache.solr.metrics.SolrCoreMetricManager;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ *
+ */
+public class SolrShardReporterTest extends AbstractFullDistribZkTestBase {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ public SolrShardReporterTest() {
+ schemaString = "schema15.xml"; // we need a string id
+ }
+
+ @Override
+ public String getSolrXml() {
+ return "solr-solrreporter.xml";
+ }
+
+ @Test
+ public void test() throws Exception {
+ waitForRecoveriesToFinish("control_collection",
+ jettys.get(0).getCoreContainer().getZkController().getZkStateReader(), false);
+ waitForRecoveriesToFinish("collection1",
+ jettys.get(0).getCoreContainer().getZkController().getZkStateReader(), false);
+ printLayout();
+ // wait for at least two reports
+ Thread.sleep(10000);
+ ClusterState state = jettys.get(0).getCoreContainer().getZkController().getClusterState();
+ for (JettySolrRunner jetty : jettys) {
+ CoreContainer cc = jetty.getCoreContainer();
+ SolrMetricManager metricManager = cc.getMetricManager();
+ for (final String coreName : cc.getCoreNames()) {
+ CoreDescriptor cd = cc.getCoreDescriptor(coreName);
+ if (cd.getCloudDescriptor() == null) { // not a cloud collection
+ continue;
+ }
+ CloudDescriptor cloudDesc = cd.getCloudDescriptor();
+ DocCollection docCollection = state.getCollection(cloudDesc.getCollectionName());
+ String replicaName = SolrCoreMetricManager.parseReplicaName(cloudDesc.getCollectionName(), coreName);
+ if (replicaName == null) {
+ replicaName = cloudDesc.getCoreNodeName();
+ }
+ String registryName = SolrCoreMetricManager.createRegistryName(true,
+ cloudDesc.getCollectionName(), cloudDesc.getShardId(), replicaName, null);
+ String leaderRegistryName = SolrCoreMetricManager.createLeaderRegistryName(true,
+ cloudDesc.getCollectionName(), cloudDesc.getShardId());
+ boolean leader = cloudDesc.isLeader();
+ Slice slice = docCollection.getSlice(cloudDesc.getShardId());
+ int numReplicas = slice.getReplicas().size();
+ if (leader) {
+ assertTrue(metricManager.registryNames() + " doesn't contain " + leaderRegistryName,
+ metricManager.registryNames().contains(leaderRegistryName));
+ Map<String, Metric> metrics = metricManager.registry(leaderRegistryName).getMetrics();
+ metrics.forEach((k, v) -> {
+ assertTrue("Unexpected type of " + k + ": " + v.getClass().getName() + ", " + v,
+ v instanceof AggregateMetric);
+ AggregateMetric am = (AggregateMetric)v;
+ if (!k.startsWith("REPLICATION.peerSync")) {
+ assertEquals(coreName + "::" + registryName + "::" + k + ": " + am.toString(), numReplicas, am.size());
+ }
+ });
+ } else {
+ assertFalse(metricManager.registryNames() + " contains " + leaderRegistryName +
+ " but it's not a leader!",
+ metricManager.registryNames().contains(leaderRegistryName));
+ Map<String, Metric> metrics = metricManager.registry(leaderRegistryName).getMetrics();
+ metrics.forEach((k, v) -> {
+ assertTrue("Unexpected type of " + k + ": " + v.getClass().getName() + ", " + v,
+ v instanceof AggregateMetric);
+ AggregateMetric am = (AggregateMetric)v;
+ if (!k.startsWith("REPLICATION.peerSync")) {
+ assertEquals(coreName + "::" + registryName + "::" + k + ": " + am.toString(), 1, am.size());
+ }
+ });
+ }
+ assertTrue(metricManager.registryNames() + " doesn't contain " + registryName,
+ metricManager.registryNames().contains(registryName));
+ }
+ }
+ SolrMetricManager metricManager = controlJetty.getCoreContainer().getMetricManager();
+ assertTrue(metricManager.registryNames().contains("solr.cluster"));
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
index e39ad6e..8717ad6 100644
--- a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
+++ b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
@@ -17,12 +17,20 @@
package org.apache.solr.util.stats;
+import java.util.Collections;
+import java.util.Map;
import java.util.concurrent.TimeUnit;
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricFilter;
+import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.Timer;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.util.NamedList;
+import org.apache.solr.metrics.AggregateMetric;
import org.junit.Test;
public class MetricUtilsTest extends SolrTestCaseJ4 {
@@ -36,7 +44,7 @@ public class MetricUtilsTest extends SolrTestCaseJ4 {
timer.update(Math.abs(random().nextInt()) + 1, TimeUnit.NANOSECONDS);
}
// obtain timer metrics
- NamedList lst = MetricUtils.timerToNamedList(timer);
+ NamedList lst = new NamedList(MetricUtils.timerToMap(timer, false));
// check that expected metrics were obtained
assertEquals(14, lst.size());
final Snapshot snapshot = timer.getSnapshot();
@@ -52,5 +60,49 @@ public class MetricUtilsTest extends SolrTestCaseJ4 {
assertEquals(MetricUtils.nsToMs(snapshot.get999thPercentile()), lst.get("p999_ms"));
}
+ @Test
+ public void testMetrics() throws Exception {
+ MetricRegistry registry = new MetricRegistry();
+ Counter counter = registry.counter("counter");
+ counter.inc();
+ Timer timer = registry.timer("timer");
+ Timer.Context ctx = timer.time();
+ Thread.sleep(150);
+ ctx.stop();
+ Meter meter = registry.meter("meter");
+ meter.mark();
+ Histogram histogram = registry.histogram("histogram");
+ histogram.update(10);
+ AggregateMetric am = new AggregateMetric();
+ registry.register("aggregate", am);
+ am.set("foo", 10);
+ am.set("bar", 1);
+ am.set("bar", 2);
+ MetricUtils.toNamedMaps(registry, Collections.singletonList(MetricFilter.ALL), MetricFilter.ALL,
+ false, false, (k, v) -> {
+ if (k.startsWith("counter")) {
+ assertEquals(1L, v.get("count"));
+ } else if (k.startsWith("timer")) {
+ assertEquals(1L, v.get("count"));
+ assertTrue(((Number)v.get("min_ms")).intValue() > 100);
+ } else if (k.startsWith("meter")) {
+ assertEquals(1L, v.get("count"));
+ } else if (k.startsWith("histogram")) {
+ assertEquals(1L, v.get("count"));
+ } else if (k.startsWith("aggregate")) {
+ assertEquals(2, v.get("count"));
+ Map<String, Object> values = (Map<String, Object>)v.get("values");
+ assertNotNull(values);
+ assertEquals(2, values.size());
+ Map<String, Object> update = (Map<String, Object>)values.get("foo");
+ assertEquals(10, update.get("value"));
+ assertEquals(1, update.get("updateCount"));
+ update = (Map<String, Object>)values.get("bar");
+ assertEquals(2, update.get("value"));
+ assertEquals(2, update.get("updateCount"));
+ }
+ });
+ }
+
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java
index 67274c2..310c282 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BinaryRequestWriter.java
@@ -112,8 +112,8 @@ public class BinaryRequestWriter extends RequestWriter {
/*
* A hack to get access to the protected internal buffer and avoid an additional copy
*/
- class BAOS extends ByteArrayOutputStream {
- byte[] getbuf() {
+ public static class BAOS extends ByteArrayOutputStream {
+ public byte[] getbuf() {
return super.buf;
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/solrj/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java
index da94162..132a1a8 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java
@@ -22,6 +22,7 @@ import java.lang.invoke.MethodHandles;
import java.util.Map;
import java.util.HashMap;
+import org.apache.http.client.HttpClient;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -38,15 +39,27 @@ public class SolrClientCache implements Serializable {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final Map<String, SolrClient> solrClients = new HashMap<>();
+ private final HttpClient httpClient;
+
+ public SolrClientCache() {
+ httpClient = null;
+ }
+
+ public SolrClientCache(HttpClient httpClient) {
+ this.httpClient = httpClient;
+ }
public synchronized CloudSolrClient getCloudSolrClient(String zkHost) {
CloudSolrClient client;
if (solrClients.containsKey(zkHost)) {
client = (CloudSolrClient) solrClients.get(zkHost);
} else {
- client = new CloudSolrClient.Builder()
- .withZkHost(zkHost)
- .build();
+ CloudSolrClient.Builder builder = new CloudSolrClient.Builder()
+ .withZkHost(zkHost);
+ if (httpClient != null) {
+ builder = builder.withHttpClient(httpClient);
+ }
+ client = builder.build();
client.connect();
solrClients.put(zkHost, client);
}
@@ -59,8 +72,11 @@ public class SolrClientCache implements Serializable {
if (solrClients.containsKey(host)) {
client = (HttpSolrClient) solrClients.get(host);
} else {
- client = new HttpSolrClient.Builder(host)
- .build();
+ HttpSolrClient.Builder builder = new HttpSolrClient.Builder(host);
+ if (httpClient != null) {
+ builder = builder.withHttpClient(httpClient);
+ }
+ client = builder.build();
solrClients.put(host, client);
}
return client;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
index b2174cd..de7c620 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
@@ -251,8 +251,8 @@ public class TestCoreAdmin extends AbstractEmbeddedSolrServerTestCase {
// assert initial metrics
SolrMetricManager metricManager = cores.getMetricManager();
- String core0RegistryName = SolrCoreMetricManager.createRegistryName(null, "core0");
- String core1RegistryName = SolrCoreMetricManager.createRegistryName(null, "core1");
+ String core0RegistryName = SolrCoreMetricManager.createRegistryName(false, null, null, null, "core0");
+ String core1RegistryName = SolrCoreMetricManager.createRegistryName(false, null, null,null, "core1");
MetricRegistry core0Registry = metricManager.registry(core0RegistryName);
MetricRegistry core1Registry = metricManager.registry(core1RegistryName);
[10/19] lucene-solr:jira/solr-9835: SOLR-9858: Collect aggregated
metrics from nodes and shard leaders in overseer.
Posted by da...@apache.org.
SOLR-9858: Collect aggregated metrics from nodes and shard leaders in overseer.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4d7bc947
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4d7bc947
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4d7bc947
Branch: refs/heads/jira/solr-9835
Commit: 4d7bc9477144937335e997ad630c4b89f558ddc5
Parents: a6e14ec
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Tue Mar 7 22:00:38 2017 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Tue Mar 7 22:01:21 2017 +0100
----------------------------------------------------------------------
solr/CHANGES.txt | 4 +
.../org/apache/solr/cloud/ElectionContext.java | 5 +-
.../java/org/apache/solr/cloud/Overseer.java | 7 +-
.../solr/cloud/OverseerNodePrioritizer.java | 2 +-
.../solr/cloud/OverseerTaskProcessor.java | 6 +-
.../org/apache/solr/cloud/ZkController.java | 2 +-
.../org/apache/solr/core/CoreContainer.java | 30 +-
.../org/apache/solr/core/JmxMonitoredMap.java | 9 +-
.../src/java/org/apache/solr/core/SolrCore.java | 4 +-
.../org/apache/solr/core/SolrInfoMBean.java | 4 +-
.../org/apache/solr/core/SolrXmlConfig.java | 3 +-
.../handler/admin/MetricsCollectorHandler.java | 228 +++++++++++
.../solr/handler/admin/MetricsHandler.java | 2 +-
.../apache/solr/metrics/AggregateMetric.java | 200 ++++++++++
.../solr/metrics/SolrCoreMetricManager.java | 125 +++++-
.../apache/solr/metrics/SolrMetricManager.java | 325 ++++++++++++++-
.../metrics/reporters/JmxObjectNameFactory.java | 6 +-
.../reporters/solr/SolrClusterReporter.java | 277 +++++++++++++
.../metrics/reporters/solr/SolrReporter.java | 392 +++++++++++++++++++
.../reporters/solr/SolrShardReporter.java | 188 +++++++++
.../metrics/reporters/solr/package-info.java | 22 ++
.../java/org/apache/solr/update/PeerSync.java | 8 +-
.../org/apache/solr/util/stats/MetricUtils.java | 265 +++++++++----
.../src/test-files/solr/solr-solrreporter.xml | 66 ++++
.../apache/solr/cloud/TestCloudRecovery.java | 6 +-
.../apache/solr/core/TestJmxMonitoredMap.java | 2 +-
.../solr/metrics/SolrCoreMetricManagerTest.java | 31 +-
.../solr/metrics/SolrMetricManagerTest.java | 30 +-
.../metrics/SolrMetricsIntegrationTest.java | 15 +-
.../metrics/reporters/SolrJmxReporterTest.java | 13 +-
.../reporters/solr/SolrCloudReportersTest.java | 163 ++++++++
.../reporters/solr/SolrShardReporterTest.java | 117 ++++++
.../apache/solr/util/stats/MetricUtilsTest.java | 54 ++-
.../client/solrj/impl/BinaryRequestWriter.java | 4 +-
.../solr/client/solrj/io/SolrClientCache.java | 26 +-
.../client/solrj/request/TestCoreAdmin.java | 4 +-
36 files changed, 2435 insertions(+), 210 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index dc97456..0e78535 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -50,6 +50,10 @@ Upgrading from Solr 6.x
factors should be indexed in a separate field and combined with the query
score using a function query.
+New Features
+----------------------
+* SOLR-9857, SOLR-9858: Collect aggregated metrics from nodes and shard leaders in overseer. (ab)
+
Bug Fixes
----------------------
* SOLR-9262: Connection and read timeouts are being ignored by UpdateShardHandler after SOLR-4509.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
index ff6fb30..d3ad322 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
@@ -714,14 +714,13 @@ final class OverseerElectionContext extends ElectionContext {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final SolrZkClient zkClient;
private Overseer overseer;
- public static final String OVERSEER_ELECT = "/overseer_elect";
public OverseerElectionContext(SolrZkClient zkClient, Overseer overseer, final String zkNodeName) {
- super(zkNodeName, OVERSEER_ELECT, OVERSEER_ELECT + "/leader", null, zkClient);
+ super(zkNodeName, Overseer.OVERSEER_ELECT, Overseer.OVERSEER_ELECT + "/leader", null, zkClient);
this.overseer = overseer;
this.zkClient = zkClient;
try {
- new ZkCmdExecutor(zkClient.getZkClientTimeout()).ensureExists(OVERSEER_ELECT, zkClient);
+ new ZkCmdExecutor(zkClient.getZkClientTimeout()).ensureExists(Overseer.OVERSEER_ELECT, zkClient);
} catch (KeeperException e) {
throw new SolrException(ErrorCode.SERVER_ERROR, e);
} catch (InterruptedException e) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/cloud/Overseer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index 3a8aa3e..61f15fc 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -65,7 +65,8 @@ public class Overseer implements Closeable {
public static final int STATE_UPDATE_DELAY = 1500; // delay between cloud state updates
public static final int NUM_RESPONSES_TO_STORE = 10000;
-
+ public static final String OVERSEER_ELECT = "/overseer_elect";
+
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
enum LeaderStatus {DONT_KNOW, NO, YES}
@@ -281,7 +282,7 @@ public class Overseer implements Closeable {
private void checkIfIamStillLeader() {
if (zkController != null && zkController.getCoreContainer().isShutDown()) return;//shutting down no need to go further
org.apache.zookeeper.data.Stat stat = new org.apache.zookeeper.data.Stat();
- String path = OverseerElectionContext.OVERSEER_ELECT + "/leader";
+ String path = OVERSEER_ELECT + "/leader";
byte[] data;
try {
data = zkClient.getData(path, null, stat, true);
@@ -394,7 +395,7 @@ public class Overseer implements Closeable {
boolean success = true;
try {
ZkNodeProps props = ZkNodeProps.load(zkClient.getData(
- OverseerElectionContext.OVERSEER_ELECT + "/leader", null, null, true));
+ OVERSEER_ELECT + "/leader", null, null, true));
if (myId.equals(props.getStr("id"))) {
return LeaderStatus.YES;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java b/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
index 6512d26..798eca3 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerNodePrioritizer.java
@@ -65,7 +65,7 @@ public class OverseerNodePrioritizer {
String ldr = OverseerTaskProcessor.getLeaderNode(zk);
if(overseerDesignates.contains(ldr)) return;
log.info("prioritizing overseer nodes at {} overseer designates are {}", overseerId, overseerDesignates);
- List<String> electionNodes = OverseerTaskProcessor.getSortedElectionNodes(zk, OverseerElectionContext.OVERSEER_ELECT + LeaderElector.ELECTION_NODE);
+ List<String> electionNodes = OverseerTaskProcessor.getSortedElectionNodes(zk, Overseer.OVERSEER_ELECT + LeaderElector.ELECTION_NODE);
if(electionNodes.size()<2) return;
log.info("sorted nodes {}", electionNodes);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
index ad53346..bed71a6 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
@@ -337,7 +337,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
public static List<String> getSortedOverseerNodeNames(SolrZkClient zk) throws KeeperException, InterruptedException {
List<String> children = null;
try {
- children = zk.getChildren(OverseerElectionContext.OVERSEER_ELECT + LeaderElector.ELECTION_NODE, null, true);
+ children = zk.getChildren(Overseer.OVERSEER_ELECT + LeaderElector.ELECTION_NODE, null, true);
} catch (Exception e) {
log.warn("error ", e);
return new ArrayList<>();
@@ -370,7 +370,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
public static String getLeaderId(SolrZkClient zkClient) throws KeeperException,InterruptedException{
byte[] data = null;
try {
- data = zkClient.getData(OverseerElectionContext.OVERSEER_ELECT + "/leader", null, new Stat(), true);
+ data = zkClient.getData(Overseer.OVERSEER_ELECT + "/leader", null, new Stat(), true);
} catch (KeeperException.NoNodeException e) {
return null;
}
@@ -384,7 +384,7 @@ public class OverseerTaskProcessor implements Runnable, Closeable {
boolean success = true;
try {
ZkNodeProps props = ZkNodeProps.load(zkStateReader.getZkClient().getData(
- OverseerElectionContext.OVERSEER_ELECT + "/leader", null, null, true));
+ Overseer.OVERSEER_ELECT + "/leader", null, null, true));
if (myId.equals(props.getStr("id"))) {
return LeaderStatus.YES;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/cloud/ZkController.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index c083736..333acd4 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -1715,7 +1715,7 @@ public class ZkController {
//however delete it . This is possible when the last attempt at deleting the election node failed.
if (electionNode.startsWith(getNodeName())) {
try {
- zkClient.delete(OverseerElectionContext.OVERSEER_ELECT + LeaderElector.ELECTION_NODE + "/" + electionNode, -1, true);
+ zkClient.delete(Overseer.OVERSEER_ELECT + LeaderElector.ELECTION_NODE + "/" + electionNode, -1, true);
} catch (NoNodeException e) {
//no problem
} catch (InterruptedException e) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index e3977d7..b9597ae 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -69,6 +69,7 @@ import org.apache.solr.handler.admin.CollectionsHandler;
import org.apache.solr.handler.admin.ConfigSetsHandler;
import org.apache.solr.handler.admin.CoreAdminHandler;
import org.apache.solr.handler.admin.InfoHandler;
+import org.apache.solr.handler.admin.MetricsCollectorHandler;
import org.apache.solr.handler.admin.MetricsHandler;
import org.apache.solr.handler.admin.SecurityConfHandler;
import org.apache.solr.handler.admin.SecurityConfHandlerLocal;
@@ -177,6 +178,8 @@ public class CoreContainer {
protected MetricsHandler metricsHandler;
+ protected MetricsCollectorHandler metricsCollectorHandler;
+
private enum CoreInitFailedAction { fromleader, none }
/**
@@ -511,15 +514,18 @@ public class CoreContainer {
coreAdminHandler = createHandler(CORES_HANDLER_PATH, cfg.getCoreAdminHandlerClass(), CoreAdminHandler.class);
configSetsHandler = createHandler(CONFIGSETS_HANDLER_PATH, cfg.getConfigSetsHandlerClass(), ConfigSetsHandler.class);
metricsHandler = createHandler(METRICS_PATH, MetricsHandler.class.getName(), MetricsHandler.class);
+ metricsCollectorHandler = createHandler(MetricsCollectorHandler.HANDLER_PATH, MetricsCollectorHandler.class.getName(), MetricsCollectorHandler.class);
+ // may want to add some configuration here in the future
+ metricsCollectorHandler.init(null);
containerHandlers.put(AUTHZ_PATH, securityConfHandler);
securityConfHandler.initializeMetrics(metricManager, SolrInfoMBean.Group.node.toString(), AUTHZ_PATH);
containerHandlers.put(AUTHC_PATH, securityConfHandler);
if(pkiAuthenticationPlugin != null)
containerHandlers.put(PKIAuthenticationPlugin.PATH, pkiAuthenticationPlugin.getRequestHandler());
- metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, SolrInfoMBean.Group.node);
- metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, SolrInfoMBean.Group.jvm);
- metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, SolrInfoMBean.Group.jetty);
+ metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.node);
+ metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.jvm);
+ metricManager.loadReporters(cfg.getMetricReporterPlugins(), loader, null, SolrInfoMBean.Group.jetty);
coreConfigService = ConfigSetService.createConfigSetService(cfg, loader, zkSys.zkController);
@@ -537,6 +543,10 @@ public class CoreContainer {
metricManager.register(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node),
unloadedCores, true, "unloaded",SolrInfoMBean.Category.CONTAINER.toString(), "cores");
+ if (isZooKeeperAware()) {
+ metricManager.loadClusterReporters(cfg.getMetricReporterPlugins(), this);
+ }
+
// setup executor to load cores in parallel
ExecutorService coreLoadExecutor = MetricUtils.instrumentedExecutorService(
ExecutorUtil.newMDCAwareFixedThreadPool(
@@ -660,10 +670,16 @@ public class CoreContainer {
isShutDown = true;
ExecutorUtil.shutdownAndAwaitTermination(coreContainerWorkExecutor);
+ if (metricManager != null) {
+ metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
+ }
if (isZooKeeperAware()) {
cancelCoreRecoveries();
- zkSys.zkController.publishNodeAsDown(zkSys.zkController.getNodeName());
+ zkSys.zkController.publishNodeAsDown(zkSys.zkController.getNodeName());
+ if (metricManager != null) {
+ metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.cluster));
+ }
}
try {
@@ -722,10 +738,6 @@ public class CoreContainer {
}
}
- if (metricManager != null) {
- metricManager.closeReporters(SolrMetricManager.getRegistryName(SolrInfoMBean.Group.node));
- }
-
// It should be safe to close the authorization plugin at this point.
try {
if(authorizationPlugin != null) {
@@ -1232,7 +1244,7 @@ public class CoreContainer {
try (SolrCore core = getCore(name)) {
if (core != null) {
String oldRegistryName = core.getCoreMetricManager().getRegistryName();
- String newRegistryName = SolrCoreMetricManager.createRegistryName(core.getCoreDescriptor().getCollectionName(), toName);
+ String newRegistryName = SolrCoreMetricManager.createRegistryName(core, toName);
metricManager.swapRegistries(oldRegistryName, newRegistryName);
registerCore(toName, core, true, false);
SolrCore old = solrCores.remove(name);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java b/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
index b2a5c79..8bfa662 100644
--- a/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
+++ b/solr/core/src/java/org/apache/solr/core/JmxMonitoredMap.java
@@ -20,6 +20,7 @@ import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.AttributeNotFoundException;
import javax.management.DynamicMBean;
+import javax.management.InstanceNotFoundException;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanException;
@@ -53,7 +54,6 @@ import org.apache.lucene.store.AlreadyClosedException;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.SolrConfig.JmxConfiguration;
-import org.apache.solr.metrics.SolrCoreMetricManager;
import org.apache.solr.metrics.reporters.JmxObjectNameFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -93,9 +93,10 @@ public class JmxMonitoredMap<K, V> extends
private final String registryName;
- public JmxMonitoredMap(String coreName, String coreHashCode,
+ public JmxMonitoredMap(String coreName, String coreHashCode, String registryName,
final JmxConfiguration jmxConfig) {
this.coreHashCode = coreHashCode;
+ this.registryName = registryName;
jmxRootName = (null != jmxConfig.rootName ?
jmxConfig.rootName
: ("solr" + (null != coreName ? "/" + coreName : "")));
@@ -117,7 +118,6 @@ public class JmxMonitoredMap<K, V> extends
if (servers == null || servers.isEmpty()) {
server = null;
- registryName = null;
nameFactory = null;
log.debug("No JMX servers found, not exposing Solr information with JMX.");
return;
@@ -141,7 +141,6 @@ public class JmxMonitoredMap<K, V> extends
}
server = newServer;
}
- registryName = SolrCoreMetricManager.createRegistryName(null, coreName);
nameFactory = new JmxObjectNameFactory(REPORTER_NAME + coreHashCode, registryName);
}
@@ -166,6 +165,8 @@ public class JmxMonitoredMap<K, V> extends
for (ObjectName name : objectNames) {
try {
server.unregisterMBean(name);
+ } catch (InstanceNotFoundException ie) {
+ // ignore - someone else already deleted this one
} catch (Exception e) {
log.warn("Exception un-registering mbean {}", name, e);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index f22c472..13c3bdd 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -860,6 +860,7 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
this.configSetProperties = configSetProperties;
// Initialize the metrics manager
this.coreMetricManager = initCoreMetricManager(config);
+ this.coreMetricManager.loadReporters();
if (updateHandler == null) {
directoryFactory = initDirectoryFactory();
@@ -1101,13 +1102,12 @@ public final class SolrCore implements SolrInfoMBean, Closeable {
*/
private SolrCoreMetricManager initCoreMetricManager(SolrConfig config) {
SolrCoreMetricManager coreMetricManager = new SolrCoreMetricManager(this);
- coreMetricManager.loadReporters();
return coreMetricManager;
}
private Map<String,SolrInfoMBean> initInfoRegistry(String name, SolrConfig config) {
if (config.jmxConfig.enabled) {
- return new JmxMonitoredMap<String, SolrInfoMBean>(name, String.valueOf(this.hashCode()), config.jmxConfig);
+ return new JmxMonitoredMap<String, SolrInfoMBean>(name, coreMetricManager.getRegistryName(), String.valueOf(this.hashCode()), config.jmxConfig);
} else {
log.debug("JMX monitoring not detected for core: " + name);
return new ConcurrentHashMap<>();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java b/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java
index bf77db4..63bdef0 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrInfoMBean.java
@@ -36,9 +36,9 @@ public interface SolrInfoMBean {
SEARCHER, REPLICATION, TLOG, INDEX, DIRECTORY, HTTP, OTHER }
/**
- * Top-level group of beans for a subsystem.
+ * Top-level group of beans or metrics for a subsystem.
*/
- enum Group { jvm, jetty, node, core }
+ enum Group { jvm, jetty, node, core, collection, shard, cluster, overseer }
/**
* Simple common usage name, e.g. BasicQueryHandler,
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
index e41cd8d..951d8d5 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
@@ -451,7 +451,8 @@ public class SolrXmlConfig {
return new PluginInfo[0];
PluginInfo[] configs = new PluginInfo[nodes.getLength()];
for (int i = 0; i < nodes.getLength(); i++) {
- configs[i] = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, true);
+ // we don't require class in order to support predefined replica and node reporter classes
+ configs[i] = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, false);
}
return configs;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
new file mode 100644
index 0000000..de39a61
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
@@ -0,0 +1,228 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.admin;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.HashMap;
+import java.util.Map;
+
+import com.codahale.metrics.MetricRegistry;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.ContentStream;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.handler.loader.ContentStreamLoader;
+import org.apache.solr.handler.RequestHandlerBase;
+import org.apache.solr.handler.loader.CSVLoader;
+import org.apache.solr.handler.loader.JavabinLoader;
+import org.apache.solr.handler.loader.JsonLoader;
+import org.apache.solr.handler.loader.XMLLoader;
+import org.apache.solr.metrics.AggregateMetric;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.reporters.solr.SolrReporter;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.update.AddUpdateCommand;
+import org.apache.solr.update.CommitUpdateCommand;
+import org.apache.solr.update.DeleteUpdateCommand;
+import org.apache.solr.update.MergeIndexesCommand;
+import org.apache.solr.update.RollbackUpdateCommand;
+import org.apache.solr.update.processor.UpdateRequestProcessor;
+import org.apache.solr.util.stats.MetricUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Handler to collect and aggregate metric reports. Each report indicates the target registry where
+ * metrics values should be collected and aggregated. Metrics with the same names are
+ * aggregated using {@link AggregateMetric} instances, which track the source of updates and
+ * their count, as well as providing simple statistics over collected values.
+ *
+ * Each report consists of {@link SolrInputDocument}-s that are expected to contain
+ * the following fields:
+ * <ul>
+ * <li>{@link SolrReporter#GROUP_ID} - (required) specifies target registry name where metrics will be grouped.</li>
+ * <li>{@link SolrReporter#REPORTER_ID} - (required) id of the reporter that sent this update. This can be eg.
+ * node name or replica name or other id that uniquely identifies the source of metrics values.</li>
+ * <li>{@link MetricUtils#METRIC_NAME} - (required) metric name (in the source registry)</li>
+ * <li>{@link SolrReporter#LABEL_ID} - (optional) label to prepend to metric names in the target registry.</li>
+ * <li>{@link SolrReporter#REGISTRY_ID} - (optional) name of the source registry.</li>
+ * </ul>
+ * Remaining fields are assumed to be single-valued, and to contain metric attributes and their values. Example:
+ * <pre>
+ * <doc>
+ * <field name="_group_">solr.core.collection1.shard1.leader</field>
+ * <field name="_reporter_">core_node3</field>
+ * <field name="metric">INDEX.merge.errors</field>
+ * <field name="value">0</field>
+ * </doc>
+ * </pre>
+ */
+public class MetricsCollectorHandler extends RequestHandlerBase {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ public static final String HANDLER_PATH = "/admin/metrics/collector";
+
+ private final CoreContainer coreContainer;
+ private final SolrMetricManager metricManager;
+ private final Map<String, ContentStreamLoader> loaders = new HashMap<>();
+ private SolrParams params;
+
+ public MetricsCollectorHandler(final CoreContainer coreContainer) {
+ this.coreContainer = coreContainer;
+ this.metricManager = coreContainer.getMetricManager();
+
+ }
+
+ @Override
+ public void init(NamedList initArgs) {
+ super.init(initArgs);
+ if (initArgs != null) {
+ params = SolrParams.toSolrParams(initArgs);
+ } else {
+ params = new ModifiableSolrParams();
+ }
+ loaders.put("application/xml", new XMLLoader().init(params) );
+ loaders.put("application/json", new JsonLoader().init(params) );
+ loaders.put("application/csv", new CSVLoader().init(params) );
+ loaders.put("application/javabin", new JavabinLoader().init(params) );
+ loaders.put("text/csv", loaders.get("application/csv") );
+ loaders.put("text/xml", loaders.get("application/xml") );
+ loaders.put("text/json", loaders.get("application/json"));
+ }
+
+ @Override
+ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
+ if (coreContainer == null || coreContainer.isShutDown()) {
+ // silently drop request
+ return;
+ }
+ //log.info("#### " + req.toString());
+ if (req.getContentStreams() == null) { // no content
+ return;
+ }
+ for (ContentStream cs : req.getContentStreams()) {
+ if (cs.getContentType() == null) {
+ log.warn("Missing content type - ignoring");
+ continue;
+ }
+ ContentStreamLoader loader = loaders.get(cs.getContentType());
+ if (loader == null) {
+ throw new SolrException(SolrException.ErrorCode.UNSUPPORTED_MEDIA_TYPE, "Unsupported content type for stream: " + cs.getSourceInfo() + ", contentType=" + cs.getContentType());
+ }
+ loader.load(req, rsp, cs, new MetricUpdateProcessor(metricManager));
+ }
+ }
+
+ @Override
+ public String getDescription() {
+ return "Handler for collecting and aggregating metric reports.";
+ }
+
+ private static class MetricUpdateProcessor extends UpdateRequestProcessor {
+ private final SolrMetricManager metricManager;
+
+ public MetricUpdateProcessor(SolrMetricManager metricManager) {
+ super(null);
+ this.metricManager = metricManager;
+ }
+
+ @Override
+ public void processAdd(AddUpdateCommand cmd) throws IOException {
+ SolrInputDocument doc = cmd.solrDoc;
+ if (doc == null) {
+ return;
+ }
+ String metricName = (String)doc.getFieldValue(MetricUtils.METRIC_NAME);
+ if (metricName == null) {
+ log.warn("Missing " + MetricUtils.METRIC_NAME + " field in document, skipping: " + doc);
+ return;
+ }
+ doc.remove(MetricUtils.METRIC_NAME);
+ // XXX we could modify keys by using this original registry name
+ doc.remove(SolrReporter.REGISTRY_ID);
+ String groupId = (String)doc.getFieldValue(SolrReporter.GROUP_ID);
+ if (groupId == null) {
+ log.warn("Missing " + SolrReporter.GROUP_ID + " field in document, skipping: " + doc);
+ return;
+ }
+ doc.remove(SolrReporter.GROUP_ID);
+ String reporterId = (String)doc.getFieldValue(SolrReporter.REPORTER_ID);
+ if (reporterId == null) {
+ log.warn("Missing " + SolrReporter.REPORTER_ID + " field in document, skipping: " + doc);
+ return;
+ }
+ doc.remove(SolrReporter.REPORTER_ID);
+ String labelId = (String)doc.getFieldValue(SolrReporter.LABEL_ID);
+ doc.remove(SolrReporter.LABEL_ID);
+ doc.forEach(f -> {
+ String key = MetricRegistry.name(labelId, metricName, f.getName());
+ MetricRegistry registry = metricManager.registry(groupId);
+ AggregateMetric metric = getOrRegister(registry, key, new AggregateMetric());
+ Object o = f.getFirstValue();
+ if (o != null) {
+ metric.set(reporterId, o);
+ } else {
+ // remove missing values
+ metric.clear(reporterId);
+ }
+ });
+ }
+
+ private AggregateMetric getOrRegister(MetricRegistry registry, String name, AggregateMetric add) {
+ AggregateMetric existing = (AggregateMetric)registry.getMetrics().get(name);
+ if (existing != null) {
+ return existing;
+ }
+ try {
+ registry.register(name, add);
+ return add;
+ } catch (IllegalArgumentException e) {
+ // someone added before us
+ existing = (AggregateMetric)registry.getMetrics().get(name);
+ if (existing == null) { // now, that is weird...
+ throw new IllegalArgumentException("Inconsistent metric status, " + name);
+ }
+ return existing;
+ }
+ }
+
+ @Override
+ public void processDelete(DeleteUpdateCommand cmd) throws IOException {
+ throw new UnsupportedOperationException("processDelete");
+ }
+
+ @Override
+ public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException {
+ throw new UnsupportedOperationException("processMergeIndexes");
+ }
+
+ @Override
+ public void processCommit(CommitUpdateCommand cmd) throws IOException {
+ throw new UnsupportedOperationException("processCommit");
+ }
+
+ @Override
+ public void processRollback(RollbackUpdateCommand cmd) throws IOException {
+ throw new UnsupportedOperationException("processRollback");
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
index 385317b..b53c818 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
@@ -79,7 +79,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
NamedList response = new NamedList();
for (String registryName : requestedRegistries) {
MetricRegistry registry = metricManager.registry(registryName);
- response.add(registryName, MetricUtils.toNamedList(registry, metricFilters, mustMatchFilter));
+ response.add(registryName, MetricUtils.toNamedList(registry, metricFilters, mustMatchFilter, false, false, null));
}
rsp.getValues().add("metrics", response);
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/AggregateMetric.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/AggregateMetric.java b/solr/core/src/java/org/apache/solr/metrics/AggregateMetric.java
new file mode 100644
index 0000000..babc99d
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/AggregateMetric.java
@@ -0,0 +1,200 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import com.codahale.metrics.Metric;
+
+/**
+ * This class is used for keeping several partial named values and providing useful statistics over them.
+ */
+public class AggregateMetric implements Metric {
+
+ /**
+ * Simple class to represent current value and how many times it was set.
+ */
+ public static class Update {
+ public Object value;
+ public final AtomicInteger updateCount = new AtomicInteger();
+
+ public Update(Object value) {
+ update(value);
+ }
+
+ public void update(Object value) {
+ this.value = value;
+ updateCount.incrementAndGet();
+ }
+
+ @Override
+ public String toString() {
+ return "Update{" +
+ "value=" + value +
+ ", updateCount=" + updateCount +
+ '}';
+ }
+ }
+
+ private final Map<String, Update> values = new ConcurrentHashMap<>();
+
+ public void set(String name, Object value) {
+ final Update existing = values.get(name);
+ if (existing == null) {
+ final Update created = new Update(value);
+ final Update raced = values.putIfAbsent(name, created);
+ if (raced != null) {
+ raced.update(value);
+ }
+ } else {
+ existing.update(value);
+ }
+ }
+
+ public void clear(String name) {
+ values.remove(name);
+ }
+
+ public void clear() {
+ values.clear();
+ }
+
+ public int size() {
+ return values.size();
+ }
+
+ public boolean isEmpty() {
+ return values.isEmpty();
+ }
+
+ public Map<String, Update> getValues() {
+ return Collections.unmodifiableMap(values);
+ }
+
+ // --------- stats ---------
+ public double getMax() {
+ if (values.isEmpty()) {
+ return 0;
+ }
+ Double res = null;
+ for (Update u : values.values()) {
+ if (!(u.value instanceof Number)) {
+ continue;
+ }
+ Number n = (Number)u.value;
+ if (res == null) {
+ res = n.doubleValue();
+ continue;
+ }
+ if (n.doubleValue() > res) {
+ res = n.doubleValue();
+ }
+ }
+ return res;
+ }
+
+ public double getMin() {
+ if (values.isEmpty()) {
+ return 0;
+ }
+ Double res = null;
+ for (Update u : values.values()) {
+ if (!(u.value instanceof Number)) {
+ continue;
+ }
+ Number n = (Number)u.value;
+ if (res == null) {
+ res = n.doubleValue();
+ continue;
+ }
+ if (n.doubleValue() < res) {
+ res = n.doubleValue();
+ }
+ }
+ return res;
+ }
+
+ public double getMean() {
+ if (values.isEmpty()) {
+ return 0;
+ }
+ double total = 0;
+ for (Update u : values.values()) {
+ if (!(u.value instanceof Number)) {
+ continue;
+ }
+ Number n = (Number)u.value;
+ total += n.doubleValue();
+ }
+ return total / values.size();
+ }
+
+ public double getStdDev() {
+ int size = values.size();
+ if (size < 2) {
+ return 0;
+ }
+ final double mean = getMean();
+ double sum = 0;
+ int count = 0;
+ for (Update u : values.values()) {
+ if (!(u.value instanceof Number)) {
+ continue;
+ }
+ count++;
+ Number n = (Number)u.value;
+ final double diff = n.doubleValue() - mean;
+ sum += diff * diff;
+ }
+ if (count < 2) {
+ return 0;
+ }
+ final double variance = sum / (count - 1);
+ return Math.sqrt(variance);
+ }
+
+ public double getSum() {
+ if (values.isEmpty()) {
+ return 0;
+ }
+ double res = 0;
+ for (Update u : values.values()) {
+ if (!(u.value instanceof Number)) {
+ continue;
+ }
+ Number n = (Number)u.value;
+ res += n.doubleValue();
+ }
+ return res;
+ }
+
+ @Override
+ public String toString() {
+ return "AggregateMetric{" +
+ "size=" + size() +
+ ", max=" + getMax() +
+ ", min=" + getMin() +
+ ", mean=" + getMean() +
+ ", stddev=" + getStdDev() +
+ ", sum=" + getSum() +
+ ", values=" + values +
+ '}';
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
index eb5b687..43f3535 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrCoreMetricManager.java
@@ -20,6 +20,7 @@ import java.io.Closeable;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
+import org.apache.solr.cloud.CloudDescriptor;
import org.apache.solr.core.NodeConfig;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
@@ -36,8 +37,14 @@ public class SolrCoreMetricManager implements Closeable {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final SolrCore core;
+ private final String tag;
private final SolrMetricManager metricManager;
private String registryName;
+ private String collectionName;
+ private String shardName;
+ private String replicaName;
+ private String leaderRegistryName;
+ private boolean cloudMode;
/**
* Constructs a metric manager.
@@ -46,8 +53,26 @@ public class SolrCoreMetricManager implements Closeable {
*/
public SolrCoreMetricManager(SolrCore core) {
this.core = core;
+ this.tag = String.valueOf(core.hashCode());
this.metricManager = core.getCoreDescriptor().getCoreContainer().getMetricManager();
- registryName = createRegistryName(core.getCoreDescriptor().getCollectionName(), core.getName());
+ initCloudMode();
+ registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName());
+ leaderRegistryName = createLeaderRegistryName(cloudMode, collectionName, shardName);
+ }
+
+ private void initCloudMode() {
+ CloudDescriptor cd = core.getCoreDescriptor().getCloudDescriptor();
+ if (cd != null) {
+ cloudMode = true;
+ collectionName = core.getCoreDescriptor().getCollectionName();
+ shardName = cd.getShardId();
+ //replicaName = cd.getCoreNodeName();
+ String coreName = core.getName();
+ replicaName = parseReplicaName(collectionName, coreName);
+ if (replicaName == null) {
+ replicaName = cd.getCoreNodeName();
+ }
+ }
}
/**
@@ -57,7 +82,11 @@ public class SolrCoreMetricManager implements Closeable {
public void loadReporters() {
NodeConfig nodeConfig = core.getCoreDescriptor().getCoreContainer().getConfig();
PluginInfo[] pluginInfos = nodeConfig.getMetricReporterPlugins();
- metricManager.loadReporters(pluginInfos, core.getResourceLoader(), SolrInfoMBean.Group.core, registryName);
+ metricManager.loadReporters(pluginInfos, core.getResourceLoader(), tag,
+ SolrInfoMBean.Group.core, registryName);
+ if (cloudMode) {
+ metricManager.loadShardReporters(pluginInfos, core);
+ }
}
/**
@@ -67,12 +96,18 @@ public class SolrCoreMetricManager implements Closeable {
*/
public void afterCoreSetName() {
String oldRegistryName = registryName;
- registryName = createRegistryName(core.getCoreDescriptor().getCollectionName(), core.getName());
+ String oldLeaderRegistryName = leaderRegistryName;
+ initCloudMode();
+ registryName = createRegistryName(cloudMode, collectionName, shardName, replicaName, core.getName());
+ leaderRegistryName = createLeaderRegistryName(cloudMode, collectionName, shardName);
if (oldRegistryName.equals(registryName)) {
return;
}
// close old reporters
- metricManager.closeReporters(oldRegistryName);
+ metricManager.closeReporters(oldRegistryName, tag);
+ if (oldLeaderRegistryName != null) {
+ metricManager.closeReporters(oldLeaderRegistryName, tag);
+ }
// load reporters again, using the new core name
loadReporters();
}
@@ -96,7 +131,7 @@ public class SolrCoreMetricManager implements Closeable {
*/
@Override
public void close() throws IOException {
- metricManager.closeReporters(getRegistryName());
+ metricManager.closeReporters(getRegistryName(), tag);
}
public SolrCore getCore() {
@@ -104,7 +139,7 @@ public class SolrCoreMetricManager implements Closeable {
}
/**
- * Retrieves the metric registry name of the manager.
+ * Metric registry name of the manager.
*
* In order to make it easier for reporting tools to aggregate metrics from
* different cores that logically belong to a single collection we convert the
@@ -124,22 +159,74 @@ public class SolrCoreMetricManager implements Closeable {
return registryName;
}
- public static String createRegistryName(String collectionName, String coreName) {
- if (collectionName == null || (collectionName != null && !coreName.startsWith(collectionName + "_"))) {
- // single core, or unknown naming scheme
+ /**
+ * Metric registry name for leader metrics. This is null if not in cloud mode.
+ * @return metric registry name for leader metrics
+ */
+ public String getLeaderRegistryName() {
+ return leaderRegistryName;
+ }
+
+ /**
+ * Return a tag specific to this instance.
+ */
+ public String getTag() {
+ return tag;
+ }
+
+ public static String createRegistryName(boolean cloud, String collectionName, String shardName, String replicaName, String coreName) {
+ if (cloud) { // build registry name from logical names
+ return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, collectionName, shardName, replicaName);
+ } else {
return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, coreName);
}
- // split "collection1_shard1_1_replica1" into parts
- String str = coreName.substring(collectionName.length() + 1);
- String shard;
- String replica = null;
- int pos = str.lastIndexOf("_replica");
- if (pos == -1) { // ?? no _replicaN part ??
- shard = str;
+ }
+
+ /**
+ * This method is used by {@link org.apache.solr.core.CoreContainer#rename(String, String)}.
+ * @param aCore existing core with old name
+ * @param coreName new name
+ * @return new registry name
+ */
+ public static String createRegistryName(SolrCore aCore, String coreName) {
+ CloudDescriptor cd = aCore.getCoreDescriptor().getCloudDescriptor();
+ String replicaName = null;
+ if (cd != null) {
+ replicaName = parseReplicaName(cd.getCollectionName(), coreName);
+ }
+ return createRegistryName(
+ cd != null,
+ cd != null ? cd.getCollectionName() : null,
+ cd != null ? cd.getShardId() : null,
+ replicaName,
+ coreName
+ );
+ }
+
+ public static String parseReplicaName(String collectionName, String coreName) {
+ if (collectionName == null || !coreName.startsWith(collectionName)) {
+ return null;
+ } else {
+ // split "collection1_shard1_1_replica1" into parts
+ if (coreName.length() > collectionName.length()) {
+ String str = coreName.substring(collectionName.length() + 1);
+ int pos = str.lastIndexOf("_replica");
+ if (pos == -1) { // ?? no _replicaN part ??
+ return str;
+ } else {
+ return str.substring(pos + 1);
+ }
+ } else {
+ return null;
+ }
+ }
+ }
+
+ public static String createLeaderRegistryName(boolean cloud, String collectionName, String shardName) {
+ if (cloud) {
+ return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.collection, collectionName, shardName, "leader");
} else {
- shard = str.substring(0, pos);
- replica = str.substring(pos + 1);
+ return null;
}
- return SolrMetricManager.getRegistryName(SolrInfoMBean.Group.core, collectionName, shard, replica);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
index cac5389..3a4c3fe 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
@@ -18,9 +18,13 @@ package org.apache.solr.metrics;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
@@ -29,6 +33,9 @@ import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+import java.util.stream.Collectors;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
@@ -39,9 +46,14 @@ import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.MetricSet;
import com.codahale.metrics.SharedMetricRegistries;
import com.codahale.metrics.Timer;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.PluginInfo;
+import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.metrics.reporters.solr.SolrClusterReporter;
+import org.apache.solr.metrics.reporters.solr.SolrShardReporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -87,27 +99,39 @@ public class SolrMetricManager {
private final Lock reportersLock = new ReentrantLock();
private final Lock swapLock = new ReentrantLock();
+ public static final int DEFAULT_CLOUD_REPORTER_PERIOD = 60;
+
public SolrMetricManager() { }
/**
* An implementation of {@link MetricFilter} that selects metrics
- * with names that start with a prefix.
+ * with names that start with one of prefixes.
*/
public static class PrefixFilter implements MetricFilter {
- private final String[] prefixes;
+ private final Set<String> prefixes = new HashSet<>();
private final Set<String> matched = new HashSet<>();
private boolean allMatch = false;
/**
- * Create a filter that uses the provided prefix.
+ * Create a filter that uses the provided prefixes.
* @param prefixes prefixes to use, must not be null. If empty then any
* name will match, if not empty then match on any prefix will
* succeed (logical OR).
*/
public PrefixFilter(String... prefixes) {
Objects.requireNonNull(prefixes);
- this.prefixes = prefixes;
- if (prefixes.length == 0) {
+ if (prefixes.length > 0) {
+ this.prefixes.addAll(Arrays.asList(prefixes));
+ }
+ if (this.prefixes.isEmpty()) {
+ allMatch = true;
+ }
+ }
+
+ public PrefixFilter(Collection<String> prefixes) {
+ Objects.requireNonNull(prefixes);
+ this.prefixes.addAll(prefixes);
+ if (this.prefixes.isEmpty()) {
allMatch = true;
}
}
@@ -141,6 +165,85 @@ public class SolrMetricManager {
public void reset() {
matched.clear();
}
+
+ @Override
+ public String toString() {
+ return "PrefixFilter{" +
+ "prefixes=" + prefixes +
+ '}';
+ }
+ }
+
+ /**
+ * An implementation of {@link MetricFilter} that selects metrics
+ * with names that match regular expression patterns.
+ */
+ public static class RegexFilter implements MetricFilter {
+ private final Set<Pattern> compiledPatterns = new HashSet<>();
+ private final Set<String> matched = new HashSet<>();
+ private boolean allMatch = false;
+
+ /**
+ * Create a filter that uses the provided prefix.
+ * @param patterns regex patterns to use, must not be null. If empty then any
+ * name will match, if not empty then match on any pattern will
+ * succeed (logical OR).
+ */
+ public RegexFilter(String... patterns) throws PatternSyntaxException {
+ this(patterns != null ? Arrays.asList(patterns) : Collections.emptyList());
+ }
+
+ public RegexFilter(Collection<String> patterns) throws PatternSyntaxException {
+ Objects.requireNonNull(patterns);
+ if (patterns.isEmpty()) {
+ allMatch = true;
+ return;
+ }
+ patterns.forEach(p -> {
+ Pattern pattern = Pattern.compile(p);
+ compiledPatterns.add(pattern);
+ });
+ if (patterns.isEmpty()) {
+ allMatch = true;
+ }
+ }
+
+ @Override
+ public boolean matches(String name, Metric metric) {
+ if (allMatch) {
+ matched.add(name);
+ return true;
+ }
+ for (Pattern p : compiledPatterns) {
+ if (p.matcher(name).matches()) {
+ matched.add(name);
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Return the set of names that matched this filter.
+ * @return matching names
+ */
+ public Set<String> getMatched() {
+ return Collections.unmodifiableSet(matched);
+ }
+
+ /**
+ * Clear the set of names that matched.
+ */
+ public void reset() {
+ matched.clear();
+ }
+
+ @Override
+ public String toString() {
+ return "RegexFilter{" +
+ "compiledPatterns=" + compiledPatterns +
+ '}';
+ }
}
/**
@@ -150,7 +253,40 @@ public class SolrMetricManager {
Set<String> set = new HashSet<>();
set.addAll(registries.keySet());
set.addAll(SharedMetricRegistries.names());
- return Collections.unmodifiableSet(set);
+ return set;
+ }
+
+ /**
+ * Return set of existing registry names that match a regex pattern
+ * @param patterns regex patterns. NOTE: users need to make sure that patterns that
+ * don't start with a wildcard use the full registry name starting with
+ * {@link #REGISTRY_NAME_PREFIX}
+ * @return set of existing registry names where at least one pattern matched.
+ */
+ public Set<String> registryNames(String... patterns) throws PatternSyntaxException {
+ if (patterns == null || patterns.length == 0) {
+ return registryNames();
+ }
+ List<Pattern> compiled = new ArrayList<>();
+ for (String pattern : patterns) {
+ compiled.add(Pattern.compile(pattern));
+ }
+ return registryNames((Pattern[])compiled.toArray(new Pattern[compiled.size()]));
+ }
+
+ public Set<String> registryNames(Pattern... patterns) {
+ Set<String> allNames = registryNames();
+ if (patterns == null || patterns.length == 0) {
+ return allNames;
+ }
+ return allNames.stream().filter(s -> {
+ for (Pattern p : patterns) {
+ if (p.matcher(s).matches()) {
+ return true;
+ }
+ }
+ return false;
+ }).collect(Collectors.toSet());
}
/**
@@ -209,7 +345,7 @@ public class SolrMetricManager {
*/
public void removeRegistry(String registry) {
// close any reporters for this registry first
- closeReporters(registry);
+ closeReporters(registry, null);
// make sure we use a name with prefix, with overrides
registry = overridableRegistryName(registry);
if (isSharedRegistry(registry)) {
@@ -490,10 +626,12 @@ public class SolrMetricManager {
* the list. If both attributes are present then only "group" attribute will be processed.
* @param pluginInfos plugin configurations
* @param loader resource loader
+ * @param tag optional tag for the reporters, to distinguish reporters logically created for different parent
+ * component instances.
* @param group selected group, not null
* @param registryNames optional child registry name elements
*/
- public void loadReporters(PluginInfo[] pluginInfos, SolrResourceLoader loader, SolrInfoMBean.Group group, String... registryNames) {
+ public void loadReporters(PluginInfo[] pluginInfos, SolrResourceLoader loader, String tag, SolrInfoMBean.Group group, String... registryNames) {
if (pluginInfos == null || pluginInfos.length == 0) {
return;
}
@@ -533,7 +671,7 @@ public class SolrMetricManager {
}
}
try {
- loadReporter(registryName, loader, info);
+ loadReporter(registryName, loader, info, tag);
} catch (Exception e) {
log.warn("Error loading metrics reporter, plugin info: " + info, e);
}
@@ -545,9 +683,12 @@ public class SolrMetricManager {
* @param registry reporter is associated with this registry
* @param loader loader to use when creating an instance of the reporter
* @param pluginInfo plugin configuration. Plugin "name" and "class" attributes are required.
+ * @param tag optional tag for the reporter, to distinguish reporters logically created for different parent
+ * component instances.
+ * @return instance of newly created and registered reporter
* @throws Exception if any argument is missing or invalid
*/
- public void loadReporter(String registry, SolrResourceLoader loader, PluginInfo pluginInfo) throws Exception {
+ public SolrMetricReporter loadReporter(String registry, SolrResourceLoader loader, PluginInfo pluginInfo, String tag) throws Exception {
if (registry == null || pluginInfo == null || pluginInfo.name == null || pluginInfo.className == null) {
throw new IllegalArgumentException("loadReporter called with missing arguments: " +
"registry=" + registry + ", loader=" + loader + ", pluginInfo=" + pluginInfo);
@@ -558,14 +699,19 @@ public class SolrMetricManager {
pluginInfo.className,
SolrMetricReporter.class,
new String[0],
- new Class[] { SolrMetricManager.class, String.class },
- new Object[] { this, registry }
+ new Class[]{SolrMetricManager.class, String.class},
+ new Object[]{this, registry}
);
try {
reporter.init(pluginInfo);
} catch (IllegalStateException e) {
throw new IllegalArgumentException("reporter init failed: " + pluginInfo, e);
}
+ registerReporter(registry, pluginInfo.name, tag, reporter);
+ return reporter;
+ }
+
+ private void registerReporter(String registry, String name, String tag, SolrMetricReporter reporter) throws Exception {
try {
if (!reportersLock.tryLock(10, TimeUnit.SECONDS)) {
throw new Exception("Could not obtain lock to modify reporters registry: " + registry);
@@ -579,12 +725,15 @@ public class SolrMetricManager {
perRegistry = new HashMap<>();
reporters.put(registry, perRegistry);
}
- SolrMetricReporter oldReporter = perRegistry.get(pluginInfo.name);
+ if (tag != null && !tag.isEmpty()) {
+ name = name + "@" + tag;
+ }
+ SolrMetricReporter oldReporter = perRegistry.get(name);
if (oldReporter != null) { // close it
- log.info("Replacing existing reporter '" + pluginInfo.name + "' in registry '" + registry + "': " + oldReporter.toString());
+ log.info("Replacing existing reporter '" + name + "' in registry '" + registry + "': " + oldReporter.toString());
oldReporter.close();
}
- perRegistry.put(pluginInfo.name, reporter);
+ perRegistry.put(name, reporter);
} finally {
reportersLock.unlock();
@@ -595,9 +744,11 @@ public class SolrMetricManager {
* Close and unregister a named {@link SolrMetricReporter} for a registry.
* @param registry registry name
* @param name reporter name
+ * @param tag optional tag for the reporter, to distinguish reporters logically created for different parent
+ * component instances.
* @return true if a named reporter existed and was closed.
*/
- public boolean closeReporter(String registry, String name) {
+ public boolean closeReporter(String registry, String name, String tag) {
// make sure we use a name with prefix, with overrides
registry = overridableRegistryName(registry);
try {
@@ -614,6 +765,9 @@ public class SolrMetricManager {
if (perRegistry == null) {
return false;
}
+ if (tag != null && !tag.isEmpty()) {
+ name = name + "@" + tag;
+ }
SolrMetricReporter reporter = perRegistry.remove(name);
if (reporter == null) {
return false;
@@ -635,6 +789,17 @@ public class SolrMetricManager {
* @return names of closed reporters
*/
public Set<String> closeReporters(String registry) {
+ return closeReporters(registry, null);
+ }
+
+ /**
+ * Close and unregister all {@link SolrMetricReporter}-s for a registry.
+ * @param registry registry name
+ * @param tag optional tag for the reporter, to distinguish reporters logically created for different parent
+ * component instances.
+ * @return names of closed reporters
+ */
+ public Set<String> closeReporters(String registry, String tag) {
// make sure we use a name with prefix, with overrides
registry = overridableRegistryName(registry);
try {
@@ -646,18 +811,28 @@ public class SolrMetricManager {
log.warn("Interrupted while trying to obtain lock to modify reporters registry: " + registry);
return Collections.emptySet();
}
- log.info("Closing metric reporters for: " + registry);
+ log.info("Closing metric reporters for registry=" + registry + ", tag=" + tag);
try {
- Map<String, SolrMetricReporter> perRegistry = reporters.remove(registry);
+ Map<String, SolrMetricReporter> perRegistry = reporters.get(registry);
if (perRegistry != null) {
- for (SolrMetricReporter reporter : perRegistry.values()) {
+ Set<String> names = new HashSet<>(perRegistry.keySet());
+ Set<String> removed = new HashSet<>();
+ names.forEach(name -> {
+ if (tag != null && !tag.isEmpty() && !name.endsWith("@" + tag)) {
+ return;
+ }
+ SolrMetricReporter reporter = perRegistry.remove(name);
try {
reporter.close();
} catch (IOException ioe) {
log.warn("Exception closing reporter " + reporter, ioe);
}
+ removed.add(name);
+ });
+ if (removed.size() == names.size()) {
+ reporters.remove(registry);
}
- return perRegistry.keySet();
+ return removed;
} else {
return Collections.emptySet();
}
@@ -695,4 +870,114 @@ public class SolrMetricManager {
reportersLock.unlock();
}
}
+
+ private List<PluginInfo> prepareCloudPlugins(PluginInfo[] pluginInfos, String group, String className,
+ Map<String, String> defaultAttributes,
+ Map<String, Object> defaultInitArgs,
+ PluginInfo defaultPlugin) {
+ List<PluginInfo> result = new ArrayList<>();
+ if (pluginInfos == null) {
+ pluginInfos = new PluginInfo[0];
+ }
+ for (PluginInfo info : pluginInfos) {
+ String groupAttr = info.attributes.get("group");
+ if (!group.equals(groupAttr)) {
+ continue;
+ }
+ info = preparePlugin(info, className, defaultAttributes, defaultInitArgs);
+ if (info != null) {
+ result.add(info);
+ }
+ }
+ if (result.isEmpty() && defaultPlugin != null) {
+ defaultPlugin = preparePlugin(defaultPlugin, className, defaultAttributes, defaultInitArgs);
+ if (defaultPlugin != null) {
+ result.add(defaultPlugin);
+ }
+ }
+ return result;
+ }
+
+ private PluginInfo preparePlugin(PluginInfo info, String className, Map<String, String> defaultAttributes,
+ Map<String, Object> defaultInitArgs) {
+ if (info == null) {
+ return null;
+ }
+ String classNameAttr = info.attributes.get("class");
+ if (className != null) {
+ if (classNameAttr != null && !className.equals(classNameAttr)) {
+ log.warn("Conflicting class name attributes, expected " + className + " but was " + classNameAttr + ", skipping " + info);
+ return null;
+ }
+ }
+
+ Map<String, String> attrs = new HashMap<>(info.attributes);
+ defaultAttributes.forEach((k, v) -> {
+ if (!attrs.containsKey(k)) {
+ attrs.put(k, v);
+ }
+ });
+ attrs.put("class", className);
+ Map<String, Object> initArgs = new HashMap<>();
+ if (info.initArgs != null) {
+ initArgs.putAll(info.initArgs.asMap(10));
+ }
+ defaultInitArgs.forEach((k, v) -> {
+ if (!initArgs.containsKey(k)) {
+ initArgs.put(k, v);
+ }
+ });
+ return new PluginInfo(info.type, attrs, new NamedList(initArgs), null);
+ }
+
+ public void loadShardReporters(PluginInfo[] pluginInfos, SolrCore core) {
+ // don't load for non-cloud cores
+ if (core.getCoreDescriptor().getCloudDescriptor() == null) {
+ return;
+ }
+ // prepare default plugin if none present in the config
+ Map<String, String> attrs = new HashMap<>();
+ attrs.put("name", "shardDefault");
+ attrs.put("group", SolrInfoMBean.Group.shard.toString());
+ Map<String, Object> initArgs = new HashMap<>();
+ initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD);
+
+ String registryName = core.getCoreMetricManager().getRegistryName();
+ // collect infos and normalize
+ List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoMBean.Group.shard.toString(), SolrShardReporter.class.getName(),
+ attrs, initArgs, null);
+ for (PluginInfo info : infos) {
+ try {
+ SolrMetricReporter reporter = loadReporter(registryName, core.getResourceLoader(), info,
+ String.valueOf(core.hashCode()));
+ ((SolrShardReporter)reporter).setCore(core);
+ } catch (Exception e) {
+ log.warn("Could not load shard reporter, pluginInfo=" + info, e);
+ }
+ }
+ }
+
+ public void loadClusterReporters(PluginInfo[] pluginInfos, CoreContainer cc) {
+ // don't load for non-cloud instances
+ if (!cc.isZooKeeperAware()) {
+ return;
+ }
+ Map<String, String> attrs = new HashMap<>();
+ attrs.put("name", "clusterDefault");
+ attrs.put("group", SolrInfoMBean.Group.cluster.toString());
+ Map<String, Object> initArgs = new HashMap<>();
+ initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD);
+ List<PluginInfo> infos = prepareCloudPlugins(pluginInfos, SolrInfoMBean.Group.cluster.toString(), SolrClusterReporter.class.getName(),
+ attrs, initArgs, null);
+ String registryName = getRegistryName(SolrInfoMBean.Group.cluster);
+ for (PluginInfo info : infos) {
+ try {
+ SolrMetricReporter reporter = loadReporter(registryName, cc.getResourceLoader(), info, null);
+ ((SolrClusterReporter)reporter).setCoreContainer(cc);
+ } catch (Exception e) {
+ log.warn("Could not load node reporter, pluginInfo=" + info, e);
+ }
+ }
+ }
+
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java b/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
index 4df5257..1f5b4f0 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/JmxObjectNameFactory.java
@@ -41,9 +41,9 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
* @param additionalProperties additional properties as key, value pairs.
*/
public JmxObjectNameFactory(String reporterName, String domain, String... additionalProperties) {
- this.reporterName = reporterName;
+ this.reporterName = reporterName.replaceAll(":", "_");
this.domain = domain;
- this.subdomains = domain.split("\\.");
+ this.subdomains = domain.replaceAll(":", "_").split("\\.");
if (additionalProperties != null && (additionalProperties.length % 2) != 0) {
throw new IllegalArgumentException("additionalProperties length must be even: " + Arrays.toString(additionalProperties));
}
@@ -83,7 +83,7 @@ public class JmxObjectNameFactory implements ObjectNameFactory {
}
sb.append(','); // separate from other properties
} else {
- sb.append(currentDomain);
+ sb.append(currentDomain.replaceAll(":", "_"));
sb.append(':');
}
} else {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d7bc947/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
new file mode 100644
index 0000000..846e805
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
@@ -0,0 +1,277 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.metrics.reporters.solr;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.net.MalformedURLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Supplier;
+
+import org.apache.http.client.HttpClient;
+import org.apache.solr.cloud.Overseer;
+import org.apache.solr.cloud.ZkController;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrInfoMBean;
+import org.apache.solr.handler.admin.MetricsCollectorHandler;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricReporter;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This reporter sends selected metrics from local registries to {@link Overseer}.
+ * <p>The following configuration properties are supported:</p>
+ * <ul>
+ * <li>handler - (optional str) handler path where reports are sent. Default is
+ * {@link MetricsCollectorHandler#HANDLER_PATH}.</li>
+ * <li>period - (optional int) how often reports are sent, in seconds. Default is 60. Setting this
+ * to 0 disables the reporter.</li>
+ * <li>report - (optional multiple lst) report configuration(s), see below.</li>
+ * </ul>
+ * Each report configuration consist of the following properties:
+ * <ul>
+ * <li>registry - (required str) regex pattern matching source registries (see {@link SolrMetricManager#registryNames(String...)}),
+ * may contain capture groups.</li>
+ * <li>group - (required str) target registry name where metrics will be grouped. This can be a regex pattern that
+ * contains back-references to capture groups collected by <code>registry</code> pattern</li>
+ * <li>label - (optional str) optional prefix to prepend to metric names, may contain back-references to
+ * capture groups collected by <code>registry</code> pattern</li>
+ * <li>filter - (optional multiple str) regex expression(s) matching selected metrics to be reported.</li>
+ * </ul>
+ * NOTE: this reporter uses predefined "overseer" group, and it's always created even if explicit configuration
+ * is missing. Default configuration uses report specifications from {@link #DEFAULT_REPORTS}.
+ * <p>Example configuration:</p>
+ * <pre>
+ * <reporter name="test" group="overseer">
+ * <str name="handler">/admin/metrics/collector</str>
+ * <int name="period">11</int>
+ * <lst name="report">
+ * <str name="group">overseer</str>
+ * <str name="label">jvm</str>
+ * <str name="registry">solr\.jvm</str>
+ * <str name="filter">memory\.total\..*</str>
+ * <str name="filter">memory\.heap\..*</str>
+ * <str name="filter">os\.SystemLoadAverage</str>
+ * <str name="filter">threads\.count</str>
+ * </lst>
+ * <lst name="report">
+ * <str name="group">overseer</str>
+ * <str name="label">leader.$1</str>
+ * <str name="registry">solr\.core\.(.*)\.leader</str>
+ * <str name="filter">UPDATE\./update/.*</str>
+ * </lst>
+ * </reporter>
+ * </pre>
+ *
+ */
+public class SolrClusterReporter extends SolrMetricReporter {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ public static final String CLUSTER_GROUP = SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.cluster.toString());
+
  /**
   * Default report specifications, used when the reporter configuration does not
   * contain any explicit "report" sections: all Jetty metrics, a selected subset
   * of JVM metrics, and selected metrics from each collection's leader registry.
   */
  public static final List<SolrReporter.Report> DEFAULT_REPORTS = new ArrayList<SolrReporter.Report>() {{
    add(new SolrReporter.Report(CLUSTER_GROUP, "jetty",
        SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.jetty.toString()),
        Collections.emptySet())); // all metrics
    add(new SolrReporter.Report(CLUSTER_GROUP, "jvm",
        SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.jvm.toString()),
        new HashSet<String>() {{
          add("memory\\.total\\..*");
          add("memory\\.heap\\..*");
          add("os\\.SystemLoadAverage");
          add("os\\.FreePhysicalMemorySize");
          add("os\\.FreeSwapSpaceSize");
          add("os\\.OpenFileDescriptorCount");
          add("threads\\.count");
        }})); // only the selected JVM metrics listed above
    // XXX anything interesting here?
    //add(new SolrReporter.Specification(OVERSEER_GROUP, "node", SolrMetricManager.overridableRegistryName(SolrInfoMBean.Group.node.toString()),
    //  Collections.emptySet())); // all metrics
    add(new SolrReporter.Report(CLUSTER_GROUP, "leader.$1", "solr\\.collection\\.(.*)\\.leader",
        new HashSet<String>(){{
          add("UPDATE\\./update/.*");
          add("QUERY\\./select.*");
          add("INDEX\\..*");
          add("TLOG\\..*");
        }}));
  }};
+
+ private String handler = MetricsCollectorHandler.HANDLER_PATH;
+ private int period = SolrMetricManager.DEFAULT_CLOUD_REPORTER_PERIOD;
+ private List<SolrReporter.Report> reports = new ArrayList<>();
+
+ private SolrReporter reporter;
+
  /**
   * Create a reporter for metrics managed in a named registry.
   *
   * @param metricManager metric manager
   * @param registryName this is ignored - the source registries are selected
   *                     by the configured report specifications instead
   */
  public SolrClusterReporter(SolrMetricManager metricManager, String registryName) {
    super(metricManager, registryName);
  }
+
  /** Set the handler path where reports are sent; defaults to {@link MetricsCollectorHandler#HANDLER_PATH}. */
  public void setHandler(String handler) {
    this.handler = handler;
  }
+
  /** Set the reporting period in seconds; values below 1 disable the reporter. */
  public void setPeriod(int period) {
    this.period = period;
  }
+
+ public void setReport(List<Map> reportConfig) {
+ if (reportConfig == null || reportConfig.isEmpty()) {
+ return;
+ }
+ reportConfig.forEach(map -> {
+ SolrReporter.Report r = SolrReporter.Report.fromMap(map);
+ if (r != null) {
+ reports.add(r);
+ }
+ });
+ }
+
  // for unit tests
  /** Visible for unit tests: the configured reporting period in seconds. */
  int getPeriod() {
    return period;
  }
+
  /** Visible for unit tests: the configured report specifications. */
  List<SolrReporter.Report> getReports() {
    return reports;
  }
+
+ @Override
+ protected void validate() throws IllegalStateException {
+ if (period < 1) {
+ log.info("Turning off node reporter, period=" + period);
+ }
+ if (reports.isEmpty()) { // set defaults
+ reports = DEFAULT_REPORTS;
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ if (reporter != null) {
+ reporter.close();;
+ }
+ }
+
+ public void setCoreContainer(CoreContainer cc) {
+ if (reporter != null) {
+ reporter.close();;
+ }
+ // start reporter only in cloud mode
+ if (!cc.isZooKeeperAware()) {
+ log.warn("Not ZK-aware, not starting...");
+ return;
+ }
+ if (period < 1) { // don't start it
+ return;
+ }
+ HttpClient httpClient = cc.getUpdateShardHandler().getHttpClient();
+ ZkController zk = cc.getZkController();
+ String reporterId = zk.getNodeName();
+ reporter = SolrReporter.Builder.forReports(metricManager, reports)
+ .convertRatesTo(TimeUnit.SECONDS)
+ .convertDurationsTo(TimeUnit.MILLISECONDS)
+ .withHandler(handler)
+ .withReporterId(reporterId)
+ .cloudClient(false) // we want to send reports specifically to a selected leader instance
+ .skipAggregateValues(true) // we don't want to transport details of aggregates
+ .skipHistograms(true) // we don't want to transport histograms
+ .build(httpClient, new OverseerUrlSupplier(zk));
+
+ reporter.start(period, TimeUnit.SECONDS);
+ }
+
+ // TODO: fix this when there is an elegant way to retrieve URL of a node that runs Overseer leader.
+ // package visibility for unit tests
+ static class OverseerUrlSupplier implements Supplier<String> {
+ private static final long DEFAULT_INTERVAL = 30000000; // 30s
+ private ZkController zk;
+ private String lastKnownUrl = null;
+ private long lastCheckTime = 0;
+ private long interval = DEFAULT_INTERVAL;
+
+ OverseerUrlSupplier(ZkController zk) {
+ this.zk = zk;
+ }
+
+ @Override
+ public String get() {
+ if (zk == null) {
+ return null;
+ }
+ // primitive caching for lastKnownUrl
+ long now = System.nanoTime();
+ if (lastKnownUrl != null && (now - lastCheckTime) < interval) {
+ return lastKnownUrl;
+ }
+ if (!zk.isConnected()) {
+ return lastKnownUrl;
+ }
+ lastCheckTime = now;
+ SolrZkClient zkClient = zk.getZkClient();
+ ZkNodeProps props;
+ try {
+ props = ZkNodeProps.load(zkClient.getData(
+ Overseer.OVERSEER_ELECT + "/leader", null, null, true));
+ } catch (KeeperException e) {
+ log.warn("Could not obtain overseer's address, skipping.", e);
+ return lastKnownUrl;
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ return lastKnownUrl;
+ }
+ if (props == null) {
+ return lastKnownUrl;
+ }
+ String oid = props.getStr("id");
+ if (oid == null) {
+ return lastKnownUrl;
+ }
+ String[] ids = oid.split("-");
+ if (ids.length != 3) { // unknown format
+ log.warn("Unknown format of leader id, skipping: " + oid);
+ return lastKnownUrl;
+ }
+ // convert nodeName back to URL
+ String url = zk.getZkStateReader().getBaseUrlForNodeName(ids[1]);
+ // check that it's parseable
+ try {
+ new java.net.URL(url);
+ } catch (MalformedURLException mue) {
+ log.warn("Malformed Overseer's leader URL: url", mue);
+ return lastKnownUrl;
+ }
+ lastKnownUrl = url;
+ return url;
+ }
+ }
+
+}
[11/19] lucene-solr:jira/solr-9835: SOLR-10235: fix precommit
Posted by da...@apache.org.
SOLR-10235: fix precommit
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d8442070
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d8442070
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d8442070
Branch: refs/heads/jira/solr-9835
Commit: d8442070cfbb0cec815a465a6c143e2b31870e34
Parents: 4d7bc94
Author: Uwe Schindler <us...@apache.org>
Authored: Tue Mar 7 22:07:13 2017 +0100
Committer: Uwe Schindler <us...@apache.org>
Committed: Tue Mar 7 22:07:13 2017 +0100
----------------------------------------------------------------------
.../org/apache/solr/handler/dataimport/TestJdbcDataSource.java | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d8442070/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
index dcb4dbc..e9908f9 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
@@ -35,6 +35,7 @@ import java.util.Properties;
import javax.sql.DataSource;
+import org.apache.solr.common.util.SuppressForbidden;
import org.apache.solr.handler.dataimport.JdbcDataSource.ResultSetIterator;
import static org.mockito.Mockito.*;
import org.junit.After;
@@ -635,6 +636,7 @@ public class TestJdbcDataSource extends AbstractDataImportHandlerTestCase {
return 0;
}
+ @SuppressForbidden(reason="Required by JDBC")
@Override
public java.util.logging.Logger getParentLogger() throws java.sql.SQLFeatureNotSupportedException {
throw new java.sql.SQLFeatureNotSupportedException();
[02/19] lucene-solr:jira/solr-9835: LUCENE-7712: SimpleQueryParser
now parses foo~ as foo~2
Posted by da...@apache.org.
LUCENE-7712: SimpleQueryParser now parses foo~ as foo~2
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/21559fe8
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/21559fe8
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/21559fe8
Branch: refs/heads/jira/solr-9835
Commit: 21559fe86da5e84c75c25b8373f6c78f1ac75a8f
Parents: 7af6cc9
Author: Mike McCandless <mi...@apache.org>
Authored: Tue Mar 7 06:55:47 2017 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Tue Mar 7 06:55:47 2017 -0500
----------------------------------------------------------------------
lucene/CHANGES.txt | 4 ++++
.../apache/lucene/queryparser/simple/SimpleQueryParser.java | 8 +++++++-
.../lucene/queryparser/simple/TestSimpleQueryParser.java | 2 +-
3 files changed, 12 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21559fe8/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index a339760..b067fde 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -150,6 +150,10 @@ New Features
efficiently filter out duplicate suggestions (Uwe Schindler, Mike
McCandless)
+* LUCENE-7712: SimpleQueryParser now supports default fuzziness
+ syntax, mapping foo~ to a FuzzyQuery with edit distance 2. (Lee
+ Hinman, David Pilato via Mike McCandless)
+
Bug Fixes
* LUCENE-7630: Fix (Edge)NGramTokenFilter to no longer drop payloads
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21559fe8/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
index 6e05aec..2a7f7ea 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/simple/SimpleQueryParser.java
@@ -498,7 +498,13 @@ public class SimpleQueryParser extends QueryBuilder {
}
int fuzziness = 0;
try {
- fuzziness = Integer.parseInt(new String(slopText, 0, slopLength));
+ String fuzzyString = new String(slopText, 0, slopLength);
+ if ("".equals(fuzzyString)) {
+ // Use automatic fuzziness, ~2
+ fuzziness = 2;
+ } else {
+ fuzziness = Integer.parseInt(fuzzyString);
+ }
} catch (NumberFormatException e) {
// swallow number format exceptions parsing fuzziness
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/21559fe8/lucene/queryparser/src/test/org/apache/lucene/queryparser/simple/TestSimpleQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/simple/TestSimpleQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/simple/TestSimpleQueryParser.java
index d70a22c..169caa2 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/simple/TestSimpleQueryParser.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/simple/TestSimpleQueryParser.java
@@ -88,7 +88,7 @@ public class TestSimpleQueryParser extends LuceneTestCase {
Query expected = new FuzzyQuery(new Term("field", "foobar"), 2);
assertEquals(expected, parse("foobar~2"));
- assertEquals(regular, parse("foobar~"));
+ assertEquals(expected, parse("foobar~"));
assertEquals(regular, parse("foobar~a"));
assertEquals(regular, parse("foobar~1a"));
[06/19] lucene-solr:jira/solr-9835: doap entries for 6.4.2
Posted by da...@apache.org.
doap entries for 6.4.2
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/57e8543b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/57e8543b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/57e8543b
Branch: refs/heads/jira/solr-9835
Commit: 57e8543bfd08533132d145985cadfcbdc5c12c36
Parents: 0d2c027
Author: Ishan Chattopadhyaya <is...@apache.org>
Authored: Wed Mar 8 00:43:01 2017 +0530
Committer: Ishan Chattopadhyaya <is...@apache.org>
Committed: Wed Mar 8 00:48:48 2017 +0530
----------------------------------------------------------------------
dev-tools/doap/lucene.rdf | 7 +++++++
dev-tools/doap/solr.rdf | 7 +++++++
2 files changed, 14 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57e8543b/dev-tools/doap/lucene.rdf
----------------------------------------------------------------------
diff --git a/dev-tools/doap/lucene.rdf b/dev-tools/doap/lucene.rdf
index 4b57d6c..c1950a0 100644
--- a/dev-tools/doap/lucene.rdf
+++ b/dev-tools/doap/lucene.rdf
@@ -68,6 +68,13 @@
<release>
<Version>
+ <name>lucene-6.4.2</name>
+ <created>2017-03-07</created>
+ <revision>6.4.2</revision>
+ </Version>
+ </release>
+ <release>
+ <Version>
<name>lucene-6.4.1</name>
<created>2017-02-06</created>
<revision>6.4.1</revision>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/57e8543b/dev-tools/doap/solr.rdf
----------------------------------------------------------------------
diff --git a/dev-tools/doap/solr.rdf b/dev-tools/doap/solr.rdf
index 47a6652..45efd08 100644
--- a/dev-tools/doap/solr.rdf
+++ b/dev-tools/doap/solr.rdf
@@ -68,6 +68,13 @@
<release>
<Version>
+ <name>solr-6.4.2</name>
+ <created>2017-03-07</created>
+ <revision>6.4.2</revision>
+ </Version>
+ </release>
+ <release>
+ <Version>
<name>solr-6.4.1</name>
<created>2017-02-06</created>
<revision>6.4.1</revision>
[04/19] lucene-solr:jira/solr-9835: SOLR-10226 JMX metric
avgTimePerRequest broken.
Posted by da...@apache.org.
SOLR-10226 JMX metric avgTimePerRequest broken.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/2d51a42d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/2d51a42d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/2d51a42d
Branch: refs/heads/jira/solr-9835
Commit: 2d51a42d3cae3eddc89f407cd3611fa2cd5d55d0
Parents: 190f4b6
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Tue Mar 7 17:59:57 2017 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Tue Mar 7 18:09:58 2017 +0100
----------------------------------------------------------------------
solr/CHANGES.txt | 6 ++++++
.../src/java/org/apache/solr/handler/RequestHandlerBase.java | 7 ++++++-
.../core/src/java/org/apache/solr/util/stats/MetricUtils.java | 2 +-
3 files changed, 13 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2d51a42d/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index db721da..5b0eb03 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -96,6 +96,10 @@ Detailed Change List
Upgrade Notes
----------------------
+* SOLR-10226: JMX metric "avgTimePerRequest" (and the corresponding metric in the metrics API for
+ each handler) used to be a simple non-decaying average based on total cumulative time and the
+ number of requests. New Codahale Metrics implementation applies exponential decay to this value,
+ which heavily biases the average towards the last 5 minutes. (ab)
New Features
----------------------
@@ -212,6 +216,8 @@ Bug Fixes
* SOLR-10088: Installer script does not put zoo.cfg in SOLR_HOME (janhoy)
+* SOLR-10226: add back "totalTime" metric to all handlers. See also the back-compat note. (ab)
+
Optimizations
----------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2d51a42d/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
index 3c6f5fa..1958e11 100644
--- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
@@ -66,6 +66,7 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
private Meter numTimeouts = new Meter();
private Counter requests = new Counter();
private Timer requestTimes = new Timer();
+ private Counter totalTime = new Counter();
private final long handlerStart;
@@ -143,6 +144,7 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
numTimeouts = manager.meter(registryName, "timeouts", getCategory().toString(), scope);
requests = manager.counter(registryName, "requests", getCategory().toString(), scope);
requestTimes = manager.timer(registryName, "requestTimes", getCategory().toString(), scope);
+ totalTime = manager.counter(registryName, "totalTime", getCategory().toString(), scope);
}
public static SolrParams getSolrParamsFromNamedList(NamedList args, String key) {
@@ -209,7 +211,8 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
}
}
} finally {
- timer.stop();
+ long elapsed = timer.stop();
+ totalTime.inc(elapsed);
}
}
@@ -292,6 +295,8 @@ public abstract class RequestHandlerBase implements SolrRequestHandler, SolrInfo
lst.add("serverErrors", numServerErrors.getCount());
lst.add("clientErrors", numClientErrors.getCount());
lst.add("timeouts", numTimeouts.getCount());
+ // convert totalTime to ms
+ lst.add("totalTime", MetricUtils.nsToMs(totalTime.getCount()));
MetricUtils.addMetrics(lst, requestTimes);
return lst;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/2d51a42d/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
index 0d386ae..80f035b 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
@@ -63,7 +63,7 @@ public class MetricUtils {
* @param ns the amount of time in nanoseconds
* @return the amount of time in milliseconds
*/
- static double nsToMs(double ns) {
+ public static double nsToMs(double ns) {
return ns / TimeUnit.MILLISECONDS.toNanos(1);
}
[05/19] lucene-solr:jira/solr-9835: SOLR-10235: Fix DIH's
TestJdbcDataSource to work with Java 9 and other Java runtimes that do not
use the same DriverManager implementation like Oracle's original one
Posted by da...@apache.org.
SOLR-10235: Fix DIH's TestJdbcDataSource to work with Java 9 and other Java runtimes that do not use the same DriverManager implementation like Oracle's original one
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0d2c0278
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0d2c0278
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0d2c0278
Branch: refs/heads/jira/solr-9835
Commit: 0d2c027857bfca3486399b0e6b19a5887081287a
Parents: 2d51a42
Author: Uwe Schindler <us...@apache.org>
Authored: Tue Mar 7 19:01:15 2017 +0100
Committer: Uwe Schindler <us...@apache.org>
Committed: Tue Mar 7 19:01:15 2017 +0100
----------------------------------------------------------------------
solr/CHANGES.txt | 6 ++
.../handler/dataimport/TestJdbcDataSource.java | 78 +++++++++++++++++---
2 files changed, 73 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0d2c0278/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 5b0eb03..dc97456 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -300,6 +300,12 @@ Other Changes
* SOLR-10230: default TTL of PKIAuthenticationPlugin increased to 10secs (noble)
+* SOLR-10235: Fix DIH's TestJdbcDataSource to work with Java 9 and other Java runtimes that
+ do not use the same DriverManager implementation like Oracle's original one. The test now
+ uses a fully implemented Driver instance returning a mock connection. The test also works
+ correctly now if other drivers were installed before test execution (e.g., through an IDE).
+ (hossman, Uwe Schindler)
+
================== 6.4.2 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0d2c0278/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
index 2f6b24c..dcb4dbc 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
@@ -35,7 +35,6 @@ import java.util.Properties;
import javax.sql.DataSource;
-import org.apache.lucene.util.Constants;
import org.apache.solr.handler.dataimport.JdbcDataSource.ResultSetIterator;
import static org.mockito.Mockito.*;
import org.junit.After;
@@ -485,17 +484,16 @@ public class TestJdbcDataSource extends AbstractDataImportHandlerTestCase {
@Test
public void testRetrieveFromDriverManager() throws Exception {
- assumeFalse("In Java 9, Class.forName() does not work for mock classes", Constants.JRE_IS_MINIMUM_JAVA9);
- DriverManager.registerDriver(driver);
+ // we're not (directly) using a Mockito based mock class here because it won't have a consistent class name
+ // that will work with DriverManager's class bindings
+ MockDriver mockDriver = new MockDriver(connection);
+ DriverManager.registerDriver(mockDriver);
try {
- when(driver.connect(notNull(),notNull())).thenReturn(connection);
-
- props.put(JdbcDataSource.DRIVER, driver.getClass().getName());
- props.put(JdbcDataSource.URL, "jdbc:fakedb");
+ props.put(JdbcDataSource.DRIVER, MockDriver.class.getName());
+ props.put(JdbcDataSource.URL, MockDriver.MY_JDBC_URL);
props.put("holdability", "HOLD_CURSORS_OVER_COMMIT");
- Connection conn = jdbcDataSource.createConnectionFactory(context, props)
- .call();
+ Connection conn = jdbcDataSource.createConnectionFactory(context, props).call();
verify(connection).setAutoCommit(false);
verify(connection).setHoldability(1);
@@ -504,7 +502,7 @@ public class TestJdbcDataSource extends AbstractDataImportHandlerTestCase {
} catch(Exception e) {
throw e;
} finally {
- DriverManager.deregisterDriver(driver);
+ DriverManager.deregisterDriver(mockDriver);
}
}
@@ -594,5 +592,63 @@ public class TestJdbcDataSource extends AbstractDataImportHandlerTestCase {
byte[] content = "secret".getBytes(StandardCharsets.UTF_8);
createFile(tmpdir, "enckeyfile.txt", content, false);
return new File(tmpdir, "enckeyfile.txt").getAbsolutePath();
- }
+ }
+
+ /**
+ * A stub driver that returns our mocked connection for connection URL {@link #MY_JDBC_URL}.
+ * <p>
+ * This class is used instead of a Mockito mock because {@link DriverManager} uses the class
+ * name to lookup the driver and also requires the driver to behave in a sane way, if other
+ * drivers are registered in the runtime. A simple Mockito mock is likely to break
+ * depending on JVM runtime version. So this class implements a full {@link Driver},
+ * so {@code DriverManager} can do whatever it wants to find the correct driver for a URL.
+ */
+ public static final class MockDriver implements Driver {
+ public static final String MY_JDBC_URL = "jdbc:fakedb";
+ private final Connection conn;
+
+ public MockDriver() throws SQLException {
+ throw new AssertionError("The driver should never be directly instantiated by DIH's JdbcDataSource");
+ }
+
+ MockDriver(Connection conn) throws SQLException {
+ this.conn = conn;
+ }
+
+ @Override
+ public boolean acceptsURL(String url) throws java.sql.SQLException {
+ return MY_JDBC_URL.equals(url);
+ }
+
+ @Override
+ public Connection connect(String url, Properties info) throws java.sql.SQLException {
+ return acceptsURL(url) ? conn : null;
+ }
+
+ @Override
+ public int getMajorVersion() {
+ return 1;
+ }
+
+ @Override
+ public int getMinorVersion() {
+ return 0;
+ }
+
+ @Override
+ public java.util.logging.Logger getParentLogger() throws java.sql.SQLFeatureNotSupportedException {
+ throw new java.sql.SQLFeatureNotSupportedException();
+ }
+
+ @Override
+ public java.sql.DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
+ return new java.sql.DriverPropertyInfo[0];
+ }
+
+ @Override
+ public boolean jdbcCompliant() {
+ // we are not fully compliant:
+ return false;
+ }
+ }
}
[12/19] lucene-solr:jira/solr-9835: LUCENE-7695: support synonyms in
ComplexPhraseQueryParser
Posted by da...@apache.org.
LUCENE-7695: support synonyms in ComplexPhraseQueryParser
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8a549293
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8a549293
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8a549293
Branch: refs/heads/jira/solr-9835
Commit: 8a5492930eff393de824450f77f27d98a204df3d
Parents: d844207
Author: Mikhail Khludnev <mk...@apache.org>
Authored: Sun Mar 5 12:24:47 2017 +0300
Committer: Mikhail Khludnev <mk...@apache.org>
Committed: Wed Mar 8 11:20:35 2017 +0300
----------------------------------------------------------------------
lucene/CHANGES.txt | 3 ++
.../complexPhrase/ComplexPhraseQueryParser.java | 21 +++++++++---
.../complexPhrase/TestComplexPhraseQuery.java | 36 +++++++++++++++++---
3 files changed, 52 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8a549293/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index b067fde..a8f7ee4 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -204,6 +204,9 @@ Improvements
IndexInput description instead of plain IOException (Mike Drob via
Mike McCandless)
+* LUCENE-7695: ComplexPhraseQueryParser to support query time synonyms (Markus Jelsma
+ via Mikhail Khludnev)
+
Optimizations
* LUCENE-7641: Optimized point range queries to compute documents that do not
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8a549293/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
index 6e18960..32f4fb3 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
@@ -28,6 +28,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.IndexSearcher;
@@ -35,6 +36,7 @@ import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.MultiTermQuery.RewriteMethod;
import org.apache.lucene.search.Query;
+import org.apache.lucene.search.SynonymQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanBoostQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
@@ -257,6 +259,7 @@ public class ComplexPhraseQueryParser extends QueryParser {
// ArrayList spanClauses = new ArrayList();
if (contents instanceof TermQuery
|| contents instanceof MultiTermQuery
+ || contents instanceof SynonymQuery
) {
return contents;
}
@@ -287,9 +290,11 @@ public class ComplexPhraseQueryParser extends QueryParser {
qc = ((BoostQuery) qc).getQuery();
}
- if (qc instanceof BooleanQuery) {
+ if (qc instanceof BooleanQuery || qc instanceof SynonymQuery) {
ArrayList<SpanQuery> sc = new ArrayList<>();
- addComplexPhraseClause(sc, (BooleanQuery) qc);
+ BooleanQuery booleanClause = qc instanceof BooleanQuery ?
+ (BooleanQuery) qc : convert((SynonymQuery) qc);
+ addComplexPhraseClause(sc, booleanClause);
if (sc.size() > 0) {
allSpanClauses[i] = sc.get(0);
} else {
@@ -309,14 +314,14 @@ public class ComplexPhraseQueryParser extends QueryParser {
if (qc instanceof TermQuery) {
TermQuery tq = (TermQuery) qc;
allSpanClauses[i] = new SpanTermQuery(tq.getTerm());
- } else {
+ } else {
throw new IllegalArgumentException("Unknown query type \""
+ qc.getClass().getName()
+ "\" found in phrase query string \""
+ phrasedQueryStringContents + "\"");
}
-
}
+
i += 1;
}
if (numNegatives == 0) {
@@ -354,6 +359,14 @@ public class ComplexPhraseQueryParser extends QueryParser {
return snot;
}
+ private BooleanQuery convert(SynonymQuery qc) {
+ BooleanQuery.Builder bqb = new BooleanQuery.Builder();
+ for (Term t : qc.getTerms()){
+ bqb.add(new BooleanClause(new TermQuery(t), Occur.SHOULD));
+ }
+ return bqb.build();
+ }
+
private void addComplexPhraseClause(List<SpanQuery> spanClauses, BooleanQuery qc) {
ArrayList<SpanQuery> ors = new ArrayList<>();
ArrayList<SpanQuery> nots = new ArrayList<>();
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8a549293/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
----------------------------------------------------------------------
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
index 28b600b..5c45e28 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
@@ -20,6 +20,7 @@ import java.util.HashSet;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockSynonymAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DirectoryReader;
@@ -39,7 +40,11 @@ public class TestComplexPhraseQuery extends LuceneTestCase {
new DocData("john smith", "1", "developer"),
new DocData("johathon smith", "2", "developer"),
new DocData("john percival smith", "3", "designer"),
- new DocData("jackson waits tom", "4", "project manager")
+ new DocData("jackson waits tom", "4", "project manager"),
+ new DocData("johny perkins", "5", "orders pizza"),
+ new DocData("hapax neverson", "6", "never matches"),
+ new DocData("dog cigar", "7", "just for synonyms"),
+ new DocData("dogs don't smoke cigarettes", "8", "just for synonyms"),
};
private IndexSearcher searcher;
@@ -73,12 +78,30 @@ public class TestComplexPhraseQuery extends LuceneTestCase {
}
public void testSingleTermPhrase() throws Exception {
- checkMatches("\"joh*\" \"tom\"", "1,2,3,4");
+ checkMatches("\"joh*\"","1,2,3,5");
+ checkMatches("\"joh~\"","1,3,5");
+ checkMatches("\"joh*\" \"tom\"", "1,2,3,4,5");
checkMatches("+\"j*\" +\"tom\"", "4");
- checkMatches("\"jo*\" \"[sma TO smZ]\" ", "1,2,3");
+ checkMatches("\"jo*\" \"[sma TO smZ]\" ", "1,2,3,5,8");
checkMatches("+\"j*hn\" +\"sm*h\"", "1,3");
}
+ public void testSynonyms() throws Exception {
+ checkMatches("\"dogs\"","8");
+ MockSynonymAnalyzer synonym = new MockSynonymAnalyzer();
+ checkMatches("\"dogs\"","7,8",synonym);
+ // synonym is unidirectional
+ checkMatches("\"dog\"","7",synonym);
+ checkMatches("\"dogs cigar*\"","");
+ checkMatches("\"dog cigar*\"","7");
+ checkMatches("\"dogs cigar*\"","7", synonym);
+ checkMatches("\"dog cigar*\"","7", synonym);
+ checkMatches("\"dogs cigar*\"~2","7,8", synonym);
+ // synonym is unidirectional
+ checkMatches("\"dog cigar*\"~2","7", synonym);
+
+ }
+
public void testUnOrderedProximitySearches() throws Exception {
inOrder = true;
@@ -98,8 +121,13 @@ public class TestComplexPhraseQuery extends LuceneTestCase {
}
private void checkMatches(String qString, String expectedVals)
+ throws Exception {
+ checkMatches(qString, expectedVals, analyzer);
+ }
+
+ private void checkMatches(String qString, String expectedVals, Analyzer anAnalyzer)
throws Exception {
- ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(defaultFieldName, analyzer);
+ ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(defaultFieldName, anAnalyzer);
qp.setInOrder(inOrder);
qp.setFuzzyPrefixLength(1); // usually a good idea