You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by ab...@apache.org on 2017/04/06 13:57:27 UTC
[01/12] lucene-solr:jira/solr-9959: SOLR-10347: Remove index level
boost support from 'documents' section of the admin UI
Repository: lucene-solr
Updated Branches:
refs/heads/jira/solr-9959 a8d4e677c -> d503fc8c1
SOLR-10347: Remove index level boost support from 'documents' section of the admin UI
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f08889f3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f08889f3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f08889f3
Branch: refs/heads/jira/solr-9959
Commit: f08889f390765c58a7f44f2ff1052484037ce336
Parents: 05749d0
Author: Tomas Fernandez Lobbe <tf...@apache.org>
Authored: Tue Apr 4 13:11:02 2017 -0700
Committer: Tomas Fernandez Lobbe <tf...@apache.org>
Committed: Tue Apr 4 13:11:02 2017 -0700
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +++
solr/webapp/web/js/angular/controllers/documents.js | 2 --
solr/webapp/web/js/scripts/documents.js | 8 --------
solr/webapp/web/partials/documents.html | 7 -------
solr/webapp/web/tpl/documents.html | 7 -------
5 files changed, 3 insertions(+), 24 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f08889f3/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 6b0658c..4fa0353 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -93,6 +93,9 @@ Optimizations
Other Changes
* SOLR-10236: Removed FieldType.getNumericType(). Use getNumberType() instead. (Tomás Fernández Lóbbe)
+* SOLR-10347: Removed index level boost support from "documents" section of the admin UI (Amrit Sarkar via
+ Tomás Fernández Lóbbe)
+
----------------------
================== 6.6.0 ==================
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f08889f3/solr/webapp/web/js/angular/controllers/documents.js
----------------------------------------------------------------------
diff --git a/solr/webapp/web/js/angular/controllers/documents.js b/solr/webapp/web/js/angular/controllers/documents.js
index be37c9f..d38265a 100644
--- a/solr/webapp/web/js/angular/controllers/documents.js
+++ b/solr/webapp/web/js/angular/controllers/documents.js
@@ -38,7 +38,6 @@ solrAdminApp.controller('DocumentsController',
$scope.type = "json";
$scope.commitWithin = 1000;
$scope.overwrite = true;
- $scope.boost = "1.0";
};
$scope.refresh();
@@ -78,7 +77,6 @@ solrAdminApp.controller('DocumentsController',
}
params.commitWithin = $scope.commitWithin;
- params.boost = $scope.boost;
params.overwrite = $scope.overwrite;
params.core = $routeParams.core;
params.wt = "json";
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f08889f3/solr/webapp/web/js/scripts/documents.js
----------------------------------------------------------------------
diff --git a/solr/webapp/web/js/scripts/documents.js b/solr/webapp/web/js/scripts/documents.js
index 9d12e23..45cfbed 100644
--- a/solr/webapp/web/js/scripts/documents.js
+++ b/solr/webapp/web/js/scripts/documents.js
@@ -29,7 +29,6 @@ var content_generator = {
//Utiltity function for turning on/off various elements
function toggles(documents_form, show_json, show_file, show_doc, doc_text, show_wizard) {
- var json_only = $('#json-only');
var the_document = $('#document', documents_form);
if (show_doc) {
//console.log("doc: " + doc_text);
@@ -38,11 +37,6 @@ function toggles(documents_form, show_json, show_file, show_doc, doc_text, show_
} else {
the_document.hide();
}
- if (show_json) {
- json_only.show();
- } else {
- json_only.hide();
- }
var file_upload = $('#file-upload', documents_form);
var upload_only = $('#upload-only', documents_form);
if (show_file) {
@@ -233,7 +227,6 @@ sammy.get
.trigger('change');
var the_document = $('#document', documents_form).val();
var commit_within = $('#commitWithin', documents_form).val();
- var boost = $('#boost', documents_form).val();
var overwrite = $('#overwrite', documents_form).val();
var the_command = "";
var content_type = "";
@@ -245,7 +238,6 @@ sammy.get
//create a JSON command
the_command = "{"
+ '"add":{ "doc":' + the_document + ","
- + '"boost":' + boost + ","
+ '"overwrite":' + overwrite + ","
+ '"commitWithin":' + commit_within
+ "}}";
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f08889f3/solr/webapp/web/partials/documents.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/partials/documents.html b/solr/webapp/web/partials/documents.html
index 74d034f..2bf3f12 100644
--- a/solr/webapp/web/partials/documents.html
+++ b/solr/webapp/web/partials/documents.html
@@ -88,13 +88,6 @@
</label>
<input ng-model="overwrite" type="text" id="overwrite" value="true" title="Overwrite">
</div>
- <!-- Boost is json only, since the XML has it embedded -->
- <div id="json-only" ng-show="type=='json'">
- <label for="boost">
- <a rel="help">Boost</a>
- </label>
- <input ng-model="boost" type="text" id="boost" value="1.0" title="Document Boost">
- </div>
</div>
</div>
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f08889f3/solr/webapp/web/tpl/documents.html
----------------------------------------------------------------------
diff --git a/solr/webapp/web/tpl/documents.html b/solr/webapp/web/tpl/documents.html
index bd953a4..d2a2e0e 100644
--- a/solr/webapp/web/tpl/documents.html
+++ b/solr/webapp/web/tpl/documents.html
@@ -85,13 +85,6 @@
</label>
<input type="text" id="overwrite" value="true" title="Overwrite">
</div>
- <!-- Boost is json only, since the XML has it embedded -->
- <div id="json-only">
- <label for="boost">
- <a rel="help">Boost</a>
- </label>
- <input type="text" id="boost" value="1.0" title="Document Boost">
- </div>
</div>
</div>
[09/12] lucene-solr:jira/solr-9959: SOLR-10239: MOVEREPLICA API
Posted by ab...@apache.org.
SOLR-10239: MOVEREPLICA API
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9c2ef561
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9c2ef561
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9c2ef561
Branch: refs/heads/jira/solr-9959
Commit: 9c2ef561e5fb17e9f74abbad62f3af0d81794b55
Parents: 37b6c60
Author: Cao Manh Dat <da...@apache.org>
Authored: Thu Apr 6 15:48:38 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Thu Apr 6 15:48:38 2017 +0700
----------------------------------------------------------------------
.../org/apache/solr/cloud/MoveReplicaCmd.java | 193 +++++++++++++++++++
.../cloud/OverseerCollectionMessageHandler.java | 1 +
.../solr/handler/admin/CollectionsHandler.java | 12 +-
.../CollectionsAPIAsyncDistributedZkTest.java | 16 +-
.../org/apache/solr/cloud/MoveReplicaTest.java | 125 ++++++++++++
.../HdfsCollectionsAPIDistributedZkTest.java | 114 +++++++++++
.../solrj/request/CollectionAdminRequest.java | 38 ++++
.../solr/common/params/CollectionParams.java | 1 +
8 files changed, 497 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c2ef561/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java
new file mode 100644
index 0000000..09d3b79
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java
@@ -0,0 +1,193 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.params.CoreAdminParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.cloud.OverseerCollectionMessageHandler.*;
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
+import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
+
+public class MoveReplicaCmd implements Cmd{
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ private final OverseerCollectionMessageHandler ocmh;
+
+ public MoveReplicaCmd(OverseerCollectionMessageHandler ocmh) {
+ this.ocmh = ocmh;
+ }
+
+ @Override
+ public void call(ClusterState state, ZkNodeProps message, NamedList results) throws Exception {
+ moveReplica(ocmh.zkStateReader.getClusterState(), message, results);
+ }
+
+ private void moveReplica(ClusterState clusterState, ZkNodeProps message, NamedList results) throws Exception {
+ log.info("moveReplica() : {}", Utils.toJSONString(message));
+ ocmh.checkRequired(message, COLLECTION_PROP, "targetNode");
+ String collection = message.getStr(COLLECTION_PROP);
+ String targetNode = message.getStr("targetNode");
+
+ String async = message.getStr(ASYNC);
+
+ DocCollection coll = clusterState.getCollection(collection);
+ if (coll == null) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Collection: " + collection + " does not exist");
+ }
+ Replica replica = null;
+ if (message.containsKey(REPLICA_PROP)) {
+ String replicaName = message.getStr(REPLICA_PROP);
+ replica = coll.getReplica(replicaName);
+ if (replica == null) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+ "Collection: " + collection + " replica: " + replicaName + " does not exist");
+ }
+ } else {
+ ocmh.checkRequired(message, SHARD_ID_PROP, "fromNode");
+ String fromNode = message.getStr("fromNode");
+ String shardId = message.getStr(SHARD_ID_PROP);
+ Slice slice = clusterState.getCollection(collection).getSlice(shardId);
+ List<Replica> sliceReplicas = new ArrayList<>(slice.getReplicas());
+ Collections.shuffle(sliceReplicas, RANDOM);
+ for (Replica r : slice.getReplicas()) {
+ if (r.getNodeName().equals(fromNode)) {
+ replica = r;
+ }
+ }
+ if (replica == null) {
+ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+ "Collection: " + collection + " node: " + fromNode + " do not have any replica belong to shard: " + shardId);
+ }
+ }
+
+ log.info("Replica will be moved {}", replica);
+ Slice slice = null;
+ for (Slice s : coll.getSlices()) {
+ if (s.getReplicas().contains(replica)) {
+ slice = s;
+ }
+ }
+ assert slice != null;
+ Object dataDir = replica.get("dataDir");
+ if (dataDir != null && dataDir.toString().startsWith("hdfs:/")) {
+ moveHdfsReplica(clusterState, results, dataDir.toString(), targetNode, async, coll, replica, slice);
+ } else {
+ moveNormalReplica(clusterState, results, targetNode, async, coll, replica, slice);
+ }
+ }
+
+ private void moveHdfsReplica(ClusterState clusterState, NamedList results, String dataDir, String targetNode, String async,
+ DocCollection coll, Replica replica, Slice slice) throws Exception {
+ String newCoreName = Assign.buildCoreName(coll, slice.getName());
+
+ ZkNodeProps removeReplicasProps = new ZkNodeProps(
+ COLLECTION_PROP, coll.getName(),
+ SHARD_ID_PROP, slice.getName(),
+ REPLICA_PROP, replica.getName()
+ );
+ removeReplicasProps.getProperties().put(CoreAdminParams.DELETE_DATA_DIR, false);
+ removeReplicasProps.getProperties().put(CoreAdminParams.DELETE_INDEX, false);
+ if(async!=null) removeReplicasProps.getProperties().put(ASYNC, async);
+ NamedList deleteResult = new NamedList();
+ ocmh.deleteReplica(clusterState, removeReplicasProps, deleteResult, ()->{});
+ if (deleteResult.get("failure") != null) {
+ String errorString = String.format(Locale.ROOT, "Failed to cleanup replica collection=%s shard=%s name=%s",
+ coll.getName(), slice.getName(), replica.getName());
+ log.warn(errorString);
+ results.add("failure", errorString + ", because of : " + deleteResult.get("failure"));
+ return;
+ }
+
+ ZkNodeProps addReplicasProps = new ZkNodeProps(
+ COLLECTION_PROP, coll.getName(),
+ SHARD_ID_PROP, slice.getName(),
+ CoreAdminParams.NODE, targetNode,
+ CoreAdminParams.NAME, newCoreName,
+ CoreAdminParams.DATA_DIR, dataDir);
+ if(async!=null) addReplicasProps.getProperties().put(ASYNC, async);
+ NamedList addResult = new NamedList();
+ ocmh.addReplica(clusterState, addReplicasProps, addResult, ()->{});
+ if (addResult.get("failure") != null) {
+ String errorString = String.format(Locale.ROOT, "Failed to create replica for collection=%s shard=%s" +
+ " on node=%s", coll.getName(), slice.getName(), targetNode);
+ log.warn(errorString);
+ results.add("failure", errorString);
+ return;
+ } else {
+ String successString = String.format(Locale.ROOT, "MOVEREPLICA action completed successfully, moved replica=%s at node=%s " +
+ "to replica=%s at node=%s", replica.getCoreName(), replica.getNodeName(), newCoreName, targetNode);
+ results.add("success", successString);
+ }
+ }
+
+ private void moveNormalReplica(ClusterState clusterState, NamedList results, String targetNode, String async,
+ DocCollection coll, Replica replica, Slice slice) throws Exception {
+ String newCoreName = Assign.buildCoreName(coll, slice.getName());
+ ZkNodeProps addReplicasProps = new ZkNodeProps(
+ COLLECTION_PROP, coll.getName(),
+ SHARD_ID_PROP, slice.getName(),
+ CoreAdminParams.NODE, targetNode,
+ CoreAdminParams.NAME, newCoreName);
+ if(async!=null) addReplicasProps.getProperties().put(ASYNC, async);
+ NamedList addResult = new NamedList();
+ ocmh.addReplica(clusterState, addReplicasProps, addResult, ()->{});
+ if (addResult.get("failure") != null) {
+ String errorString = String.format(Locale.ROOT, "Failed to create replica for collection=%s shard=%s" +
+ " on node=%s", coll.getName(), slice.getName(), targetNode);
+ log.warn(errorString);
+ results.add("failure", errorString);
+ return;
+ }
+
+ ZkNodeProps removeReplicasProps = new ZkNodeProps(
+ COLLECTION_PROP, coll.getName(),
+ SHARD_ID_PROP, slice.getName(),
+ REPLICA_PROP, replica.getName());
+ if(async!=null) removeReplicasProps.getProperties().put(ASYNC, async);
+ NamedList deleteResult = new NamedList();
+ ocmh.deleteReplica(clusterState, removeReplicasProps, deleteResult, ()->{});
+ if (deleteResult.get("failure") != null) {
+ String errorString = String.format(Locale.ROOT, "Failed to cleanup replica collection=%s shard=%s name=%s",
+ coll.getName(), slice.getName(), replica.getName());
+ log.warn(errorString);
+ results.add("failure", errorString + ", because of : " + deleteResult.get("failure"));
+ } else {
+ String successString = String.format(Locale.ROOT, "MOVEREPLICA action completed successfully, moved replica=%s at node=%s " +
+ "to replica=%s at node=%s", replica.getCoreName(), replica.getNodeName(), newCoreName, targetNode);
+ results.add("success", successString);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c2ef561/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
index 4d64a00..2c08305 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java
@@ -208,6 +208,7 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler
.put(DELETESHARD, new DeleteShardCmd(this))
.put(DELETEREPLICA, new DeleteReplicaCmd(this))
.put(ADDREPLICA, new AddReplicaCmd(this))
+ .put(MOVEREPLICA, new MoveReplicaCmd(this))
.build()
;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c2ef561/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 2e17af6..bb06190 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -859,6 +859,16 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
return null;
}),
REPLACENODE_OP(REPLACENODE, (req, rsp, h) -> req.getParams().required().getAll(req.getParams().getAll(null, "parallel"), "source", "target")),
+ MOVEREPLICA_OP(MOVEREPLICA, (req, rsp, h) -> {
+ Map<String, Object> map = req.getParams().required().getAll(null,
+ COLLECTION_PROP);
+
+ return req.getParams().getAll(map,
+ "fromNode",
+ "targetNode",
+ "replica",
+ "shard");
+ }),
DELETENODE_OP(DELETENODE, (req, rsp, h) -> req.getParams().required().getAll(null, "node"));
public final CollectionOp fun;
CollectionAction action;
@@ -881,7 +891,7 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission
for (CollectionOperation op : values()) {
if (op.action == action) return op;
}
- throw new SolrException(ErrorCode.SERVER_ERROR, "No such action" + action);
+ throw new SolrException(ErrorCode.SERVER_ERROR, "No such action " + action);
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c2ef561/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java
index dcb115a..30c3c9e 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java
@@ -29,6 +29,7 @@ import org.apache.solr.client.solrj.request.CollectionAdminRequest.SplitShard;
import org.apache.solr.client.solrj.response.RequestStatusState;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -178,11 +179,22 @@ public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase {
//expected
}
- String replica = shard1.getReplicas().iterator().next().getName();
+ Replica replica = shard1.getReplicas().iterator().next();
+ for (String liveNode : client.getZkStateReader().getClusterState().getLiveNodes()) {
+ if (!replica.getNodeName().equals(liveNode)) {
+ state = new CollectionAdminRequest.MoveReplica(collection, replica.getName(), liveNode)
+ .processAndWait(client, MAX_TIMEOUT_SECONDS);
+ assertSame("MoveReplica did not complete", RequestStatusState.COMPLETED, state);
+ break;
+ }
+ }
+
+ shard1 = client.getZkStateReader().getClusterState().getSlice(collection, "shard1");
+ String replicaName = shard1.getReplicas().iterator().next().getName();
state = new CollectionAdminRequest.DeleteReplica()
.setCollectionName(collection)
.setShardName("shard1")
- .setReplica(replica)
+ .setReplica(replicaName)
.processAndWait(client, MAX_TIMEOUT_SECONDS);
assertSame("DeleteReplica did not complete", RequestStatusState.COMPLETED, state);
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c2ef561/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
new file mode 100644
index 0000000..4368fea
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaTest.java
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.client.solrj.response.CoreAdminResponse;
+import org.apache.solr.client.solrj.response.RequestStatusState;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class MoveReplicaTest extends SolrCloudTestCase {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ @BeforeClass
+ public static void setupCluster() throws Exception {
+ configureCluster(4)
+ .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf"))
+ .configure();
+ }
+
+ protected String getSolrXml() {
+ return "solr.xml";
+ }
+
+ @Test
+ public void test() throws Exception {
+ cluster.waitForAllNodes(5000);
+ String coll = "movereplicatest_coll";
+ log.info("total_jettys: " + cluster.getJettySolrRunners().size());
+
+ CloudSolrClient cloudClient = cluster.getSolrClient();
+
+ CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf1", 2, 2);
+ create.setMaxShardsPerNode(2);
+ cloudClient.request(create);
+
+ Replica replica = getRandomReplica(coll, cloudClient);
+ Set<String> liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes();
+ ArrayList<String> l = new ArrayList<>(liveNodes);
+ Collections.shuffle(l, random());
+ String targetNode = null;
+ for (String node : liveNodes) {
+ if (!replica.getNodeName().equals(node)) {
+ targetNode = node;
+ break;
+ }
+ }
+ assertNotNull(targetNode);
+ String shardId = null;
+ for (Slice slice : cloudClient.getZkStateReader().getClusterState().getCollection(coll).getSlices()) {
+ if (slice.getReplicas().contains(replica)) {
+ shardId = slice.getName();
+ }
+ }
+
+ CollectionAdminRequest.MoveReplica moveReplica = new CollectionAdminRequest.MoveReplica(coll, replica.getName(), targetNode);
+ moveReplica.processAsync("000", cloudClient);
+ CollectionAdminRequest.RequestStatus requestStatus = CollectionAdminRequest.requestStatus("000");
+ // wait for async request success
+ boolean success = false;
+ for (int i = 0; i < 200; i++) {
+ CollectionAdminRequest.RequestStatusResponse rsp = requestStatus.process(cloudClient);
+ if (rsp.getRequestStatus() == RequestStatusState.COMPLETED) {
+ success = true;
+ break;
+ }
+ assertFalse(rsp.getRequestStatus() == RequestStatusState.FAILED);
+ Thread.sleep(50);
+ }
+ assertTrue(success);
+ checkNumOfCores(cloudClient, replica.getNodeName(), 0);
+ checkNumOfCores(cloudClient, targetNode, 2);
+
+ moveReplica = new CollectionAdminRequest.MoveReplica(coll, shardId, targetNode, replica.getNodeName());
+ moveReplica.process(cloudClient);
+ checkNumOfCores(cloudClient, replica.getNodeName(), 1);
+ checkNumOfCores(cloudClient, targetNode, 1);
+ }
+
+ private Replica getRandomReplica(String coll, CloudSolrClient cloudClient) {
+ List<Replica> replicas = cloudClient.getZkStateReader().getClusterState().getCollection(coll).getReplicas();
+ Collections.shuffle(replicas, random());
+ return replicas.get(0);
+ }
+
+ private void checkNumOfCores(CloudSolrClient cloudClient, String nodeName, int expectedCores) throws IOException, SolrServerException {
+ assertEquals(nodeName + " does not have expected number of cores",expectedCores, getNumOfCores(cloudClient, nodeName));
+ }
+
+ private int getNumOfCores(CloudSolrClient cloudClient, String nodeName) throws IOException, SolrServerException {
+ try (HttpSolrClient coreclient = getHttpSolrClient(cloudClient.getZkStateReader().getBaseUrlForNodeName(nodeName))) {
+ CoreAdminResponse status = CoreAdminRequest.getStatus(null, coreclient);
+ return status.getCoreStatus().size();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c2ef561/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
index 1b830ad..58d499b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsCollectionsAPIDistributedZkTest.java
@@ -16,15 +16,37 @@
*/
package org.apache.solr.cloud.hdfs;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
import com.carrotsearch.randomizedtesting.annotations.Nightly;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.Metric;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.lucene.util.LuceneTestCase.Slow;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.client.solrj.request.CoreStatus;
+import org.apache.solr.client.solrj.response.CoreAdminResponse;
import org.apache.solr.cloud.CollectionsAPIDistributedZkTest;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkConfigManager;
+import org.apache.solr.metrics.SolrMetricManager;
import org.apache.solr.util.BadHdfsThreadsFilter;
import org.junit.AfterClass;
import org.junit.BeforeClass;
+import org.junit.Test;
@Slow
@Nightly
@@ -59,4 +81,96 @@ public class HdfsCollectionsAPIDistributedZkTest extends CollectionsAPIDistribut
System.clearProperty("solr.hdfs.home");
}
+ @Test
+ public void moveReplicaTest() throws Exception {
+ cluster.waitForAllNodes(5000);
+ String coll = "movereplicatest_coll";
+
+ CloudSolrClient cloudClient = cluster.getSolrClient();
+
+ CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(coll, "conf", 2, 2);
+ create.setMaxShardsPerNode(2);
+ cloudClient.request(create);
+
+ for (int i = 0; i < 10; i++) {
+ cloudClient.add(coll, sdoc("id",String.valueOf(i)));
+ cloudClient.commit(coll);
+ }
+
+ List<Slice> slices = new ArrayList<>(cloudClient.getZkStateReader().getClusterState().getCollection(coll).getSlices());
+ Collections.shuffle(slices, random());
+ Slice slice = null;
+ Replica replica = null;
+ for (Slice s : slices) {
+ slice = s;
+ for (Replica r : s.getReplicas()) {
+ if (s.getLeader() != r) {
+ replica = r;
+ }
+ }
+ }
+ String dataDir = getDataDir(replica);
+
+ Set<String> liveNodes = cloudClient.getZkStateReader().getClusterState().getLiveNodes();
+ ArrayList<String> l = new ArrayList<>(liveNodes);
+ Collections.shuffle(l, random());
+ String targetNode = null;
+ for (String node : liveNodes) {
+ if (!replica.getNodeName().equals(node)) {
+ targetNode = node;
+ break;
+ }
+ }
+ assertNotNull(targetNode);
+
+ CollectionAdminRequest.MoveReplica moveReplica = new CollectionAdminRequest.MoveReplica(coll, replica.getName(), targetNode);
+ moveReplica.process(cloudClient);
+
+ checkNumOfCores(cloudClient, replica.getNodeName(), 0);
+ checkNumOfCores(cloudClient, targetNode, 2);
+
+ waitForState("Wait for recovery finish failed",coll, clusterShape(2,2));
+ slice = cloudClient.getZkStateReader().getClusterState().getCollection(coll).getSlice(slice.getName());
+ boolean found = false;
+ for (Replica newReplica : slice.getReplicas()) {
+ if (getDataDir(newReplica).equals(dataDir)) {
+ found = true;
+ }
+ }
+ assertTrue(found);
+
+
+ // data dir is reused so replication will be skipped
+ for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
+ SolrMetricManager manager = jetty.getCoreContainer().getMetricManager();
+ List<String> registryNames = manager.registryNames().stream()
+ .filter(s -> s.startsWith("solr.core.")).collect(Collectors.toList());
+ for (String registry : registryNames) {
+ Map<String, Metric> metrics = manager.registry(registry).getMetrics();
+ Counter counter = (Counter) metrics.get("REPLICATION./replication.requests");
+ if (counter != null) {
+ assertEquals(0, counter.getCount());
+ }
+ }
+ }
+ }
+
+
+ private void checkNumOfCores(CloudSolrClient cloudClient, String nodeName, int expectedCores) throws IOException, SolrServerException {
+ assertEquals(nodeName + " does not have expected number of cores",expectedCores, getNumOfCores(cloudClient, nodeName));
+ }
+
+ private int getNumOfCores(CloudSolrClient cloudClient, String nodeName) throws IOException, SolrServerException {
+ try (HttpSolrClient coreclient = getHttpSolrClient(cloudClient.getZkStateReader().getBaseUrlForNodeName(nodeName))) {
+ CoreAdminResponse status = CoreAdminRequest.getStatus(null, coreclient);
+ return status.getCoreStatus().size();
+ }
+ }
+
+ private String getDataDir(Replica replica) throws IOException, SolrServerException {
+ try (HttpSolrClient coreclient = getHttpSolrClient(replica.getBaseUrl())) {
+ CoreStatus status = CoreAdminRequest.getCoreStatus(replica.getCoreName(), coreclient);
+ return status.getDataDirectory();
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c2ef561/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
index 8beb6ed..f87f149 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionAdminRequest.java
@@ -613,6 +613,44 @@ public abstract class CollectionAdminRequest<T extends CollectionAdminResponse>
}
+ public static class MoveReplica extends AsyncCollectionAdminRequest {
+ String collection, replica, targetNode;
+ String shard, fromNode;
+ boolean randomlyMoveReplica;
+
+ public MoveReplica(String collection, String replica, String targetNode) {
+ super(CollectionAction.MOVEREPLICA);
+ this.collection = collection;
+ this.replica = replica;
+ this.targetNode = targetNode;
+ this.randomlyMoveReplica = false;
+ }
+
+ public MoveReplica(String collection, String shard, String fromNode, String targetNode) {
+ super(CollectionAction.MOVEREPLICA);
+ this.collection = collection;
+ this.shard = shard;
+ this.fromNode = fromNode;
+ this.targetNode = targetNode;
+ this.randomlyMoveReplica = true;
+ }
+
+ @Override
+ public SolrParams getParams() {
+ ModifiableSolrParams params = (ModifiableSolrParams) super.getParams();
+ params.set("collection", collection);
+ params.set("targetNode", targetNode);
+ if (randomlyMoveReplica) {
+ params.set("shard", shard);
+ params.set("fromNode", fromNode);
+ } else {
+ params.set("replica", replica);
+ }
+ return params;
+ }
+ }
+
+
/*
* Returns a RebalanceLeaders object to rebalance leaders for a collection
*/
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9c2ef561/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
index f1e5a52..51db039 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
@@ -80,6 +80,7 @@ public interface CollectionParams {
REQUESTSTATUS(false, LockLevel.NONE),
DELETESTATUS(false, LockLevel.NONE),
ADDREPLICA(true, LockLevel.SHARD),
+ MOVEREPLICA(true, LockLevel.SHARD),
OVERSEERSTATUS(false, LockLevel.NONE),
LIST(false, LockLevel.NONE),
CLUSTERSTATUS(false, LockLevel.NONE),
[08/12] lucene-solr:jira/solr-9959: SOLR-10426: Add shuffle Streaming
Expression
Posted by ab...@apache.org.
SOLR-10426: Add shuffle Streaming Expression
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/37b6c605
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/37b6c605
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/37b6c605
Branch: refs/heads/jira/solr-9959
Commit: 37b6c60548f3288ee057dbd8ce1e1594ab48d314
Parents: dbd22a6
Author: Joel Bernstein <jb...@apache.org>
Authored: Wed Apr 5 17:57:11 2017 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Wed Apr 5 17:57:24 2017 -0400
----------------------------------------------------------------------
.../org/apache/solr/handler/StreamHandler.java | 39 +------
.../client/solrj/io/stream/CloudSolrStream.java | 13 ++-
.../client/solrj/io/stream/ShuffleStream.java | 103 +++++++++++++++++++
.../solrj/io/stream/StreamExpressionTest.java | 86 +++++++++++++++-
4 files changed, 199 insertions(+), 42 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37b6c605/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index 599924e..8f123ec 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -74,43 +74,7 @@ import org.apache.solr.client.solrj.io.ops.ConcatOperation;
import org.apache.solr.client.solrj.io.ops.DistinctOperation;
import org.apache.solr.client.solrj.io.ops.GroupOperation;
import org.apache.solr.client.solrj.io.ops.ReplaceOperation;
-import org.apache.solr.client.solrj.io.stream.CartesianProductStream;
-import org.apache.solr.client.solrj.io.stream.CloudSolrStream;
-import org.apache.solr.client.solrj.io.stream.CommitStream;
-import org.apache.solr.client.solrj.io.stream.ComplementStream;
-import org.apache.solr.client.solrj.io.stream.DaemonStream;
-import org.apache.solr.client.solrj.io.stream.ExceptionStream;
-import org.apache.solr.client.solrj.io.stream.ExecutorStream;
-import org.apache.solr.client.solrj.io.stream.FacetStream;
-import org.apache.solr.client.solrj.io.stream.FeaturesSelectionStream;
-import org.apache.solr.client.solrj.io.stream.FetchStream;
-import org.apache.solr.client.solrj.io.stream.HashJoinStream;
-import org.apache.solr.client.solrj.io.stream.HavingStream;
-import org.apache.solr.client.solrj.io.stream.InnerJoinStream;
-import org.apache.solr.client.solrj.io.stream.IntersectStream;
-import org.apache.solr.client.solrj.io.stream.JDBCStream;
-import org.apache.solr.client.solrj.io.stream.LeftOuterJoinStream;
-import org.apache.solr.client.solrj.io.stream.MergeStream;
-import org.apache.solr.client.solrj.io.stream.ModelStream;
-import org.apache.solr.client.solrj.io.stream.NullStream;
-import org.apache.solr.client.solrj.io.stream.OuterHashJoinStream;
-import org.apache.solr.client.solrj.io.stream.ParallelStream;
-import org.apache.solr.client.solrj.io.stream.PriorityStream;
-import org.apache.solr.client.solrj.io.stream.RandomStream;
-import org.apache.solr.client.solrj.io.stream.RankStream;
-import org.apache.solr.client.solrj.io.stream.ReducerStream;
-import org.apache.solr.client.solrj.io.stream.RollupStream;
-import org.apache.solr.client.solrj.io.stream.ScoreNodesStream;
-import org.apache.solr.client.solrj.io.stream.SelectStream;
-import org.apache.solr.client.solrj.io.stream.SignificantTermsStream;
-import org.apache.solr.client.solrj.io.stream.SortStream;
-import org.apache.solr.client.solrj.io.stream.StatsStream;
-import org.apache.solr.client.solrj.io.stream.StreamContext;
-import org.apache.solr.client.solrj.io.stream.TextLogitStream;
-import org.apache.solr.client.solrj.io.stream.TopicStream;
-import org.apache.solr.client.solrj.io.stream.TupleStream;
-import org.apache.solr.client.solrj.io.stream.UniqueStream;
-import org.apache.solr.client.solrj.io.stream.UpdateStream;
+import org.apache.solr.client.solrj.io.stream.*;
import org.apache.solr.client.solrj.io.stream.expr.Explanation;
import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
import org.apache.solr.client.solrj.io.stream.expr.Expressible;
@@ -223,6 +187,7 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
.withFunctionName("priority", PriorityStream.class)
.withFunctionName("significantTerms", SignificantTermsStream.class)
.withFunctionName("cartesianProduct", CartesianProductStream.class)
+ .withFunctionName("shuffle", ShuffleStream.class)
// metrics
.withFunctionName("min", MinMetric.class)
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37b6c605/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
index 1acd79d..7161dc4 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java
@@ -80,7 +80,7 @@ public class CloudSolrStream extends TupleStream implements Expressible {
protected String zkHost;
protected String collection;
protected SolrParams params;
- private Map<String, String> fieldMappings;
+ protected Map<String, String> fieldMappings;
protected StreamComparator comp;
private boolean trace;
protected transient Map<String, Tuple> eofTuples;
@@ -191,7 +191,7 @@ public class CloudSolrStream extends TupleStream implements Expressible {
// functionName(collectionName, param1, param2, ..., paramN, sort="comp", [aliases="field=alias,..."])
// function name
- StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+ StreamExpression expression = new StreamExpression(factory.getFunctionName(getClass()));
// collection
expression.addParameter(collection);
@@ -254,7 +254,7 @@ public class CloudSolrStream extends TupleStream implements Expressible {
return explanation;
}
- private void init(String collectionName, String zkHost, SolrParams params) throws IOException {
+ protected void init(String collectionName, String zkHost, SolrParams params) throws IOException {
this.zkHost = zkHost;
this.collection = collectionName;
this.params = new ModifiableSolrParams(params);
@@ -405,7 +405,8 @@ public class CloudSolrStream extends TupleStream implements Expressible {
Collection<Slice> slices = CloudSolrStream.getSlices(this.collection, zkStateReader, true);
- ModifiableSolrParams mParams = new ModifiableSolrParams(params);
+ ModifiableSolrParams mParams = new ModifiableSolrParams(params);
+ mParams = adjustParams(mParams);
mParams.set(DISTRIB, "false"); // We are the aggregator.
Set<String> liveNodes = clusterState.getLiveNodes();
@@ -571,4 +572,8 @@ public class CloudSolrStream extends TupleStream implements Expressible {
}
}
}
+
+ protected ModifiableSolrParams adjustParams(ModifiableSolrParams params) {
+ return params;
+ }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37b6c605/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java
new file mode 100644
index 0000000..d30918b
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream;
+
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+
+import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+
+
+public class ShuffleStream extends CloudSolrStream implements Expressible {
+
+ public ShuffleStream(StreamExpression expression, StreamFactory factory) throws IOException {
+ // grab all parameters out
+ String collectionName = factory.getValueOperand(expression, 0);
+ List<StreamExpressionNamedParameter> namedParams = factory.getNamedOperands(expression);
+ StreamExpressionNamedParameter aliasExpression = factory.getNamedOperand(expression, "aliases");
+ StreamExpressionNamedParameter zkHostExpression = factory.getNamedOperand(expression, "zkHost");
+
+ // Collection Name
+ if(null == collectionName){
+ throw new IOException(String.format(Locale.ROOT,"invalid expression %s - collectionName expected as first operand",expression));
+ }
+
+ // Validate there are no unknown parameters - zkHost and alias are namedParameter so we don't need to count it twice
+ if(expression.getParameters().size() != 1 + namedParams.size()){
+ throw new IOException(String.format(Locale.ROOT,"invalid expression %s - unknown operands found",expression));
+ }
+
+ // Named parameters - passed directly to solr as solrparams
+ if(0 == namedParams.size()){
+ throw new IOException(String.format(Locale.ROOT,"invalid expression %s - at least one named parameter expected. eg. 'q=*:*'",expression));
+ }
+
+ ModifiableSolrParams mParams = new ModifiableSolrParams();
+ for(StreamExpressionNamedParameter namedParam : namedParams){
+ if(!namedParam.getName().equals("zkHost") && !namedParam.getName().equals("aliases")){
+ mParams.add(namedParam.getName(), namedParam.getParameter().toString().trim());
+ }
+ }
+
+ // Aliases, optional, if provided then need to split
+ if(null != aliasExpression && aliasExpression.getParameter() instanceof StreamExpressionValue){
+ fieldMappings = new HashMap<>();
+ for(String mapping : ((StreamExpressionValue)aliasExpression.getParameter()).getValue().split(",")){
+ String[] parts = mapping.trim().split("=");
+ if(2 == parts.length){
+ fieldMappings.put(parts[0], parts[1]);
+ }
+ else{
+ throw new IOException(String.format(Locale.ROOT,"invalid expression %s - alias expected of the format origName=newName",expression));
+ }
+ }
+ }
+
+ // zkHost, optional - if not provided then will look into factory list to get
+ String zkHost = null;
+ if(null == zkHostExpression){
+ zkHost = factory.getCollectionZkHost(collectionName);
+ if(zkHost == null) {
+ zkHost = factory.getDefaultZkHost();
+ }
+ }
+ else if(zkHostExpression.getParameter() instanceof StreamExpressionValue){
+ zkHost = ((StreamExpressionValue)zkHostExpression.getParameter()).getValue();
+ }
+ if(null == zkHost){
+ throw new IOException(String.format(Locale.ROOT,"invalid expression %s - zkHost not found for collection '%s'",expression,collectionName));
+ }
+
+ // We've got all the required items
+ init(collectionName, zkHost, mParams);
+ }
+
+ public ModifiableSolrParams adjustParams(ModifiableSolrParams mParams) {
+ mParams.set(CommonParams.QT, "/export");
+ return mParams;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37b6c605/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 581013f..bb0bd7e 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -1096,7 +1096,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
assertTrue("blah blah blah 9".equals(t.getString("subject")));
//Change the batch size
- stream = factory.constructStream("fetch("+ COLLECTIONORALIAS +", search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), on=\"id=a_i\", batchSize=\"3\", fl=\"subject\")");
+ stream = factory.constructStream("fetch(" + COLLECTIONORALIAS + ", search(" + COLLECTIONORALIAS + ", q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc\"), on=\"id=a_i\", batchSize=\"3\", fl=\"subject\")");
context = new StreamContext();
context.setSolrClientCache(solrClientCache);
stream.setStreamContext(context);
@@ -1603,6 +1603,90 @@ public class StreamExpressionTest extends SolrCloudTestCase {
}
@Test
+ public void testParallelShuffleStream() throws Exception {
+
+ new UpdateRequest()
+ .add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "0")
+ .add(id, "2", "a_s", "hello2", "a_i", "2", "a_f", "0")
+ .add(id, "3", "a_s", "hello3", "a_i", "3", "a_f", "3")
+ .add(id, "4", "a_s", "hello4", "a_i", "4", "a_f", "4")
+ .add(id, "1", "a_s", "hello1", "a_i", "1", "a_f", "1")
+ .add(id, "5", "a_s", "hello1", "a_i", "10", "a_f", "1")
+ .add(id, "6", "a_s", "hello1", "a_i", "11", "a_f", "5")
+ .add(id, "7", "a_s", "hello1", "a_i", "12", "a_f", "5")
+ .add(id, "8", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "9", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "10", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "11", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "12", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "13", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "14", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "15", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "16", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "17", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "18", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "19", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "20", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "21", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "22", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "23", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "24", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "25", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "26", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "27", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "28", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "29", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "30", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "31", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "32", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "33", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "34", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "35", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "36", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "37", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "38", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "39", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "40", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "41", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "42", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "43", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "44", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "45", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "46", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "47", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "48", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "49", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "50", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "51", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "52", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "53", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "54", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "55", "a_s", "hello1", "a_i", "13", "a_f", "4")
+ .add(id, "56", "a_s", "hello1", "a_i", "13", "a_f", "1000")
+
+ .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+
+ String zkHost = cluster.getZkServer().getZkAddress();
+ StreamFactory streamFactory = new StreamFactory().withCollectionZkHost(COLLECTIONORALIAS, zkHost)
+ .withFunctionName("shuffle", ShuffleStream.class)
+ .withFunctionName("unique", UniqueStream.class)
+ .withFunctionName("parallel", ParallelStream.class);
+
+ ParallelStream pstream = (ParallelStream)streamFactory.constructStream("parallel(" + COLLECTIONORALIAS + ", unique(shuffle(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", partitionKeys=\"a_f\"), over=\"a_f\"), workers=\"2\", zkHost=\""+zkHost+"\", sort=\"a_f asc\")");
+
+ List<Tuple> tuples = getTuples(pstream);
+ assert(tuples.size() == 6);
+ assertOrder(tuples, 0, 1, 3, 4, 6, 56);
+
+ //Test the eofTuples
+
+ Map<String,Tuple> eofTuples = pstream.getEofTuples();
+ assert(eofTuples.size() == 2); //There should be an EOF tuple for each worker.
+ assert(pstream.toExpression(streamFactory).toString().contains("shuffle"));
+ }
+
+
+ @Test
public void testParallelReducerStream() throws Exception {
new UpdateRequest()
[10/12] lucene-solr:jira/solr-9959: SOLR-10239: change empty lambda
to null
Posted by ab...@apache.org.
SOLR-10239: change empty lambda to null
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c30a7dc5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c30a7dc5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c30a7dc5
Branch: refs/heads/jira/solr-9959
Commit: c30a7dc597f02f68c0f4b101a60e31b69d2d616e
Parents: 9c2ef56
Author: Cao Manh Dat <da...@apache.org>
Authored: Thu Apr 6 15:57:43 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Thu Apr 6 15:57:43 2017 +0700
----------------------------------------------------------------------
solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c30a7dc5/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java
index 09d3b79..545989e 100644
--- a/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/MoveReplicaCmd.java
@@ -122,7 +122,7 @@ public class MoveReplicaCmd implements Cmd{
removeReplicasProps.getProperties().put(CoreAdminParams.DELETE_INDEX, false);
if(async!=null) removeReplicasProps.getProperties().put(ASYNC, async);
NamedList deleteResult = new NamedList();
- ocmh.deleteReplica(clusterState, removeReplicasProps, deleteResult, ()->{});
+ ocmh.deleteReplica(clusterState, removeReplicasProps, deleteResult, null);
if (deleteResult.get("failure") != null) {
String errorString = String.format(Locale.ROOT, "Failed to cleanup replica collection=%s shard=%s name=%s",
coll.getName(), slice.getName(), replica.getName());
@@ -139,7 +139,7 @@ public class MoveReplicaCmd implements Cmd{
CoreAdminParams.DATA_DIR, dataDir);
if(async!=null) addReplicasProps.getProperties().put(ASYNC, async);
NamedList addResult = new NamedList();
- ocmh.addReplica(clusterState, addReplicasProps, addResult, ()->{});
+ ocmh.addReplica(clusterState, addReplicasProps, addResult, null);
if (addResult.get("failure") != null) {
String errorString = String.format(Locale.ROOT, "Failed to create replica for collection=%s shard=%s" +
" on node=%s", coll.getName(), slice.getName(), targetNode);
@@ -163,7 +163,7 @@ public class MoveReplicaCmd implements Cmd{
CoreAdminParams.NAME, newCoreName);
if(async!=null) addReplicasProps.getProperties().put(ASYNC, async);
NamedList addResult = new NamedList();
- ocmh.addReplica(clusterState, addReplicasProps, addResult, ()->{});
+ ocmh.addReplica(clusterState, addReplicasProps, addResult, null);
if (addResult.get("failure") != null) {
String errorString = String.format(Locale.ROOT, "Failed to create replica for collection=%s shard=%s" +
" on node=%s", coll.getName(), slice.getName(), targetNode);
@@ -178,7 +178,7 @@ public class MoveReplicaCmd implements Cmd{
REPLICA_PROP, replica.getName());
if(async!=null) removeReplicasProps.getProperties().put(ASYNC, async);
NamedList deleteResult = new NamedList();
- ocmh.deleteReplica(clusterState, removeReplicasProps, deleteResult, ()->{});
+ ocmh.deleteReplica(clusterState, removeReplicasProps, deleteResult, null);
if (deleteResult.get("failure") != null) {
String errorString = String.format(Locale.ROOT, "Failed to cleanup replica collection=%s shard=%s name=%s",
coll.getName(), slice.getName(), replica.getName());
[11/12] lucene-solr:jira/solr-9959: SOLR-10239: Update CHANGES.txt
Posted by ab...@apache.org.
SOLR-10239: Update CHANGES.txt
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ad132669
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ad132669
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ad132669
Branch: refs/heads/jira/solr-9959
Commit: ad1326698d600e89c6f745f3817acc4b6a3f84e7
Parents: c30a7dc
Author: Cao Manh Dat <da...@apache.org>
Authored: Thu Apr 6 16:02:20 2017 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Thu Apr 6 16:02:20 2017 +0700
----------------------------------------------------------------------
solr/CHANGES.txt | 2 ++
1 file changed, 2 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ad132669/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 214d6bb..cebdac5 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -137,6 +137,8 @@ New Features
* SOLR-9993: Add support for ExpandComponent with PointFields. (Cao Manh Dat)
+* SOLR-10239: MOVEREPLICA API (Cao Manh Dat, Noble Paul, shalin)
+
Optimizations
----------------------
[05/12] lucene-solr:jira/solr-9959: SOLR-10404: fetch() streaming
expression: escape values in generated query.
Posted by ab...@apache.org.
SOLR-10404: fetch() streaming expression: escape values in generated query.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/cb9f151d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/cb9f151d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/cb9f151d
Branch: refs/heads/jira/solr-9959
Commit: cb9f151db4b5ad5c5f581b6b8cf2e5916ddb0f35
Parents: 50ed729
Author: David Smiley <ds...@apache.org>
Authored: Wed Apr 5 08:56:50 2017 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Wed Apr 5 08:56:50 2017 -0400
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +++
.../client/solrj/io/stream/FetchStream.java | 20 +++++++-------------
.../solrj/io/stream/StreamExpressionTest.java | 18 +++++++++++++++++-
3 files changed, 27 insertions(+), 14 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cb9f151d/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index bb623dc..ea825d9 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -213,6 +213,9 @@ Bug Fixes
* SOLR-10421: Fix params persistence for solr/contrib/ltr (MinMax|Standard)Normalizer classes.
(Jianxiong Dong, Christine Poerschke)
+* SOLR-10404: The fetch() streaming expression wouldn't work if a value included query syntax chars (like :+-).
+ Fixed, and enhanced the generated query to not pollute the queryCache. (David Smiley)
+
================== 6.5.0 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cb9f151d/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
index 06e6fdc..2cd60ec 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FetchStream.java
@@ -35,6 +35,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.params.ModifiableSolrParams;
import static org.apache.solr.common.params.CommonParams.SORT;
@@ -208,9 +209,8 @@ public class FetchStream extends TupleStream implements Expressible {
}
private void fetchBatch() throws IOException {
-
Tuple EOFTuple = null;
- List<Tuple> batch = new ArrayList();
+ List<Tuple> batch = new ArrayList<>(batchSize);
for(int i=0; i<batchSize; i++) {
Tuple tuple = stream.read();
if(tuple.EOF) {
@@ -222,18 +222,12 @@ public class FetchStream extends TupleStream implements Expressible {
}
if(batch.size() > 0) {
- StringBuilder buf = new StringBuilder();
- buf.append(rightKey);
- buf.append(":(");
- for (int i = 0; i < batch.size(); i++) {
- if (i > 0) {
- buf.append(" ");
- }
- Tuple tuple = batch.get(i);
+ StringBuilder buf = new StringBuilder(batch.size() * 10 + 20);
+ buf.append("{! df=").append(rightKey).append(" q.op=OR cache=false }");//disable queryCache
+ for (Tuple tuple : batch) {
String key = tuple.getString(leftKey);
- buf.append(key);
+ buf.append(' ').append(ClientUtils.escapeQueryChars(key));
}
- buf.append(")");
ModifiableSolrParams params = new ModifiableSolrParams();
params.add("q", buf.toString());
@@ -245,7 +239,7 @@ public class FetchStream extends TupleStream implements Expressible {
StreamContext newContext = new StreamContext();
newContext.setSolrClientCache(streamContext.getSolrClientCache());
cloudSolrStream.setStreamContext(newContext);
- Map<String, Tuple> fetched = new HashMap();
+ Map<String, Tuple> fetched = new HashMap<>();
try {
cloudSolrStream.open();
while (true) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cb9f151d/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index f153a1b..581013f 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -1044,7 +1044,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
@Test
public void testFetchStream() throws Exception {
- SolrClientCache solrClientCache = new SolrClientCache();
+ SolrClientCache solrClientCache = new SolrClientCache();//TODO share in @Before ; close in @After ?
new UpdateRequest()
.add(id, "0", "a_s", "hello0", "a_i", "0", "a_f", "1", "subject", "blah blah blah 0")
@@ -1123,6 +1123,22 @@ public class StreamExpressionTest extends SolrCloudTestCase {
assertTrue("blah blah blah 8".equals(t.getString("subject")));
t = tuples.get(9);
assertTrue("blah blah blah 9".equals(t.getString("subject")));
+
+ // SOLR-10404 test that "hello 99" as a value gets escaped
+ new UpdateRequest()
+ .add(id, "99", "a1_s", "hello 99", "a2_s", "hello 99", "subject", "blah blah blah 99")
+ .commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+
+ stream = factory.constructStream("fetch("+ COLLECTIONORALIAS +", search(" + COLLECTIONORALIAS + ", q=" + id + ":99, fl=\"id,a1_s\", sort=\"id asc\"), on=\"a1_s=a2_s\", fl=\"subject\")");
+ context = new StreamContext();
+ context.setSolrClientCache(solrClientCache);
+ stream.setStreamContext(context);
+ tuples = getTuples(stream);
+
+ assertEquals(1, tuples.size());
+ t = tuples.get(0);
+ assertTrue("blah blah blah 99".equals(t.getString("subject")));
+
solrClientCache.close();
}
[12/12] lucene-solr:jira/solr-9959: Merge branch 'master' into
jira/solr-9959
Posted by ab...@apache.org.
Merge branch 'master' into jira/solr-9959
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d503fc8c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d503fc8c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d503fc8c
Branch: refs/heads/jira/solr-9959
Commit: d503fc8c13399976227ed66fd9f915939f4af97f
Parents: a8d4e67 ad13266
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Thu Apr 6 15:21:10 2017 +0200
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Thu Apr 6 15:21:10 2017 +0200
----------------------------------------------------------------------
.../lucene/document/InetAddressRange.java | 14 +-
.../search/TestInetAddressRangeQueries.java | 100 +++----
solr/CHANGES.txt | 18 ++
.../apache/solr/ltr/norm/MinMaxNormalizer.java | 4 +-
.../solr/ltr/norm/StandardNormalizer.java | 4 +-
.../LTRFeatureLoggerTransformerFactory.java | 5 -
.../solr/ltr/search/LTRQParserPlugin.java | 5 -
.../solr/ltr/norm/TestMinMaxNormalizer.java | 1 +
.../solr/ltr/norm/TestStandardNormalizer.java | 1 +
.../org/apache/solr/cloud/MoveReplicaCmd.java | 193 ++++++++++++++
.../java/org/apache/solr/cloud/Overseer.java | 2 +-
.../cloud/OverseerCollectionMessageHandler.java | 1 +
.../apache/solr/cloud/overseer/NodeMutator.java | 29 +-
.../solr/cloud/overseer/ZkWriteCommand.java | 5 +
.../org/apache/solr/handler/StreamHandler.java | 39 +--
.../solr/handler/admin/CollectionsHandler.java | 12 +-
.../org/apache/solr/parser/QueryParser.java | 5 +-
.../java/org/apache/solr/parser/QueryParser.jj | 5 +-
.../apache/solr/parser/SolrQueryParserBase.java | 23 +-
.../java/org/apache/solr/schema/FieldType.java | 4 +-
.../java/org/apache/solr/schema/TextField.java | 13 +-
.../solr/search/ExtendedDismaxQParser.java | 5 +-
.../solr/collection1/conf/schema12.xml | 11 +-
.../apache/solr/cloud/ClusterStateMockUtil.java | 233 ++++++++++++++++
.../CollectionsAPIAsyncDistributedZkTest.java | 16 +-
.../org/apache/solr/cloud/MoveReplicaTest.java | 125 +++++++++
.../org/apache/solr/cloud/NodeMutatorTest.java | 95 +++++++
.../SharedFSAutoReplicaFailoverUtilsTest.java | 263 ++-----------------
.../HdfsCollectionsAPIDistributedZkTest.java | 114 ++++++++
.../solr/search/TestExtendedDismaxParser.java | 18 ++
.../apache/solr/search/TestSolrQueryParser.java | 18 ++
.../client/solrj/io/stream/CloudSolrStream.java | 13 +-
.../client/solrj/io/stream/FetchStream.java | 20 +-
.../client/solrj/io/stream/ShuffleStream.java | 103 ++++++++
.../solrj/request/CollectionAdminRequest.java | 38 +++
.../solr/common/params/CollectionParams.java | 1 +
.../solrj/io/stream/StreamExpressionTest.java | 104 +++++++-
.../web/js/angular/controllers/documents.js | 2 -
solr/webapp/web/js/scripts/documents.js | 8 -
solr/webapp/web/partials/documents.html | 7 -
solr/webapp/web/tpl/documents.html | 7 -
41 files changed, 1255 insertions(+), 429 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d503fc8c/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
----------------------------------------------------------------------
[03/12] lucene-solr:jira/solr-9959: SOLR-10421: Fix params
persistence for solr/contrib/ltr (MinMax|Standard)Normalizer classes.
(Jianxiong Dong, Christine Poerschke)
Posted by ab...@apache.org.
SOLR-10421: Fix params persistence for solr/contrib/ltr (MinMax|Standard)Normalizer classes.
(Jianxiong Dong, Christine Poerschke)
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/dfa342bc
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/dfa342bc
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/dfa342bc
Branch: refs/heads/jira/solr-9959
Commit: dfa342bc90f8f3012e26169286ee61d42e9b15cf
Parents: 6030302
Author: Christine Poerschke <cp...@apache.org>
Authored: Wed Apr 5 11:50:13 2017 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Wed Apr 5 11:50:13 2017 +0100
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +++
.../ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java | 4 ++--
.../src/java/org/apache/solr/ltr/norm/StandardNormalizer.java | 4 ++--
.../src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java | 1 +
.../test/org/apache/solr/ltr/norm/TestStandardNormalizer.java | 1 +
5 files changed, 9 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfa342bc/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 3540315..bb623dc 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -210,6 +210,9 @@ Bug Fixes
* SOLR-10277: On 'downnode', lots of wasteful mutations are done to ZK.
(Joshua Humphries, Scott Blum, Varun Thacker, shalin)
+* SOLR-10421: Fix params persistence for solr/contrib/ltr (MinMax|Standard)Normalizer classes.
+ (Jianxiong Dong, Christine Poerschke)
+
================== 6.5.0 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfa342bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java
index 92e233c..ff31c01 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java
@@ -90,8 +90,8 @@ public class MinMaxNormalizer extends Normalizer {
@Override
public LinkedHashMap<String,Object> paramsToMap() {
final LinkedHashMap<String,Object> params = new LinkedHashMap<>(2, 1.0f);
- params.put("min", min);
- params.put("max", max);
+ params.put("min", '"'+Float.toString(min)+'"');
+ params.put("max", '"'+Float.toString(max)+'"');
return params;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfa342bc/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java
index 7ab525c..57df7b4 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java
@@ -82,8 +82,8 @@ public class StandardNormalizer extends Normalizer {
@Override
public LinkedHashMap<String,Object> paramsToMap() {
final LinkedHashMap<String,Object> params = new LinkedHashMap<>(2, 1.0f);
- params.put("avg", avg);
- params.put("std", std);
+ params.put("avg", '"'+Float.toString(avg)+'"');
+ params.put("std", '"'+Float.toString(std)+'"');
return params;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfa342bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
index 055b3bc..794e393 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
@@ -40,6 +40,7 @@ public class TestMinMaxNormalizer {
final MinMaxNormalizer mmn = (MinMaxNormalizer)n;
assertEquals(mmn.getMin(), expectedMin, 0.0);
assertEquals(mmn.getMax(), expectedMax, 0.0);
+ assertEquals("{min=\""+expectedMin+"\", max=\""+expectedMax+"\"}", mmn.paramsToMap().toString());
return n;
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfa342bc/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
index 10fa972..1794686 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
@@ -40,6 +40,7 @@ public class TestStandardNormalizer {
final StandardNormalizer sn = (StandardNormalizer)n;
assertEquals(sn.getAvg(), expectedAvg, 0.0);
assertEquals(sn.getStd(), expectedStd, 0.0);
+ assertEquals("{avg=\""+expectedAvg+"\", std=\""+expectedStd+"\"}", sn.paramsToMap().toString());
return n;
}
[07/12] lucene-solr:jira/solr-9959: SOLR-10423: Disable graph query
production via schema configuration <fieldtype ... enableGraphQueries="false">.
This fixes broken queries for ShingleFilter-containing query-time analyzers when request param sow=false.
Posted by ab...@apache.org.
SOLR-10423: Disable graph query production via schema configuration <fieldtype ... enableGraphQueries="false">. This fixes broken queries for ShingleFilter-containing query-time analyzers when request param sow=false.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/dbd22a6a
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/dbd22a6a
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/dbd22a6a
Branch: refs/heads/jira/solr-9959
Commit: dbd22a6ada774eb30aee4b9312eb7913dee6890e
Parents: ef8126e
Author: Steve Rowe <sa...@gmail.com>
Authored: Wed Apr 5 16:23:26 2017 -0400
Committer: Steve Rowe <sa...@gmail.com>
Committed: Wed Apr 5 16:23:26 2017 -0400
----------------------------------------------------------------------
solr/CHANGES.txt | 4 ++++
.../org/apache/solr/parser/QueryParser.java | 5 +++--
.../java/org/apache/solr/parser/QueryParser.jj | 5 +++--
.../apache/solr/parser/SolrQueryParserBase.java | 23 ++++++++++++++------
.../java/org/apache/solr/schema/FieldType.java | 4 +++-
.../java/org/apache/solr/schema/TextField.java | 13 ++++++++++-
.../solr/search/ExtendedDismaxQParser.java | 5 +++--
.../solr/collection1/conf/schema12.xml | 11 +++++++++-
.../solr/search/TestExtendedDismaxParser.java | 18 +++++++++++++++
.../apache/solr/search/TestSolrQueryParser.java | 18 +++++++++++++++
10 files changed, 90 insertions(+), 16 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbd22a6a/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index ea825d9..214d6bb 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -215,6 +215,10 @@ Bug Fixes
* SOLR-10404: The fetch() streaming expression wouldn't work if a value included query syntax chars (like :+-).
Fixed, and enhanced the generated query to not pollute the queryCache. (David Smiley)
+
+* SOLR-10423: Disable graph query production via schema configuration <fieldtype ... enableGraphQueries="false">.
+ This fixes broken queries for ShingleFilter-containing query-time analyzers when request param sow=false.
+ (Steve Rowe)
================== 6.5.0 ==================
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbd22a6a/solr/core/src/java/org/apache/solr/parser/QueryParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/parser/QueryParser.java b/solr/core/src/java/org/apache/solr/parser/QueryParser.java
index e846c6e..2b64b88 100644
--- a/solr/core/src/java/org/apache/solr/parser/QueryParser.java
+++ b/solr/core/src/java/org/apache/solr/parser/QueryParser.java
@@ -52,12 +52,13 @@ public class QueryParser extends SolrQueryParserBase implements QueryParserConst
@Override
protected Query newFieldQuery(Analyzer analyzer, String field, String queryText,
- boolean quoted, boolean fieldAutoGenPhraseQueries) throws SyntaxError {
+ boolean quoted, boolean fieldAutoGenPhraseQueries, boolean fieldEnableGraphQueries)
+ throws SyntaxError {
setAutoGenerateMultiTermSynonymsPhraseQuery(fieldAutoGenPhraseQueries || getAutoGeneratePhraseQueries());
// Don't auto-quote graph-aware field queries
boolean treatAsQuoted = getSplitOnWhitespace()
? (quoted || fieldAutoGenPhraseQueries || getAutoGeneratePhraseQueries()) : quoted;
- return super.newFieldQuery(analyzer, field, queryText, treatAsQuoted, false);
+ return super.newFieldQuery(analyzer, field, queryText, treatAsQuoted, false, fieldEnableGraphQueries);
}
// * Query ::= ( Clause )*
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbd22a6a/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/parser/QueryParser.jj b/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
index d4d6539..c07b28d 100644
--- a/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
+++ b/solr/core/src/java/org/apache/solr/parser/QueryParser.jj
@@ -76,12 +76,13 @@ public class QueryParser extends SolrQueryParserBase {
@Override
protected Query newFieldQuery(Analyzer analyzer, String field, String queryText,
- boolean quoted, boolean fieldAutoGenPhraseQueries) throws SyntaxError {
+ boolean quoted, boolean fieldAutoGenPhraseQueries, boolean fieldEnableGraphQueries)
+ throws SyntaxError {
setAutoGenerateMultiTermSynonymsPhraseQuery(fieldAutoGenPhraseQueries || getAutoGeneratePhraseQueries());
// Don't auto-quote graph-aware field queries
boolean treatAsQuoted = getSplitOnWhitespace()
? (quoted || fieldAutoGenPhraseQueries || getAutoGeneratePhraseQueries()) : quoted;
- return super.newFieldQuery(analyzer, field, queryText, treatAsQuoted, false);
+ return super.newFieldQuery(analyzer, field, queryText, treatAsQuoted, false, fieldEnableGraphQueries);
}
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbd22a6a/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java b/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
index 08ccdd1..21e0aa0 100644
--- a/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
+++ b/solr/core/src/java/org/apache/solr/parser/SolrQueryParserBase.java
@@ -437,10 +437,14 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
}
protected Query newFieldQuery(Analyzer analyzer, String field, String queryText,
- boolean quoted, boolean fieldAutoGenPhraseQueries) throws SyntaxError {
+ boolean quoted, boolean fieldAutoGenPhraseQueries, boolean fieldEnableGraphQueries)
+ throws SyntaxError {
BooleanClause.Occur occur = operator == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD;
- return createFieldQuery(analyzer, occur, field, queryText,
+ setEnableGraphQueries(fieldEnableGraphQueries);
+ Query query = createFieldQuery(analyzer, occur, field, queryText,
quoted || fieldAutoGenPhraseQueries || autoGeneratePhraseQueries, phraseSlop);
+ setEnableGraphQueries(true); // reset back to default
+ return query;
}
@@ -632,8 +636,10 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
Query subq;
if (ft.isTokenized() && sfield.indexed()) {
boolean fieldAutoGenPhraseQueries = ft instanceof TextField && ((TextField)ft).getAutoGeneratePhraseQueries();
+ boolean fieldEnableGraphQueries = ft instanceof TextField && ((TextField)ft).getEnableGraphQueries();
+
subq = newFieldQuery(getAnalyzer(), sfield.getName(), rawq.getJoinedExternalVal(),
- false, fieldAutoGenPhraseQueries);
+ false, fieldAutoGenPhraseQueries, fieldEnableGraphQueries);
booleanBuilder.add(subq, BooleanClause.Occur.SHOULD);
} else {
for (String externalVal : rawq.getExternalVals()) {
@@ -949,7 +955,8 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
// delegate to type for everything except tokenized fields
if (ft.isTokenized() && sf.indexed()) {
boolean fieldAutoGenPhraseQueries = ft instanceof TextField && ((TextField)ft).getAutoGeneratePhraseQueries();
- return newFieldQuery(getAnalyzer(), field, queryText, quoted, fieldAutoGenPhraseQueries);
+ boolean fieldEnableGraphQueries = ft instanceof TextField && ((TextField)ft).getEnableGraphQueries();
+ return newFieldQuery(getAnalyzer(), field, queryText, quoted, fieldAutoGenPhraseQueries, fieldEnableGraphQueries);
} else {
if (raw) {
return new RawQuery(sf, queryText);
@@ -960,7 +967,7 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
}
// default to a normal field query
- return newFieldQuery(getAnalyzer(), field, queryText, quoted, false);
+ return newFieldQuery(getAnalyzer(), field, queryText, quoted, false, true);
}
// Assumption: quoted is always false
@@ -993,7 +1000,9 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
if (ft.isTokenized() && sf.indexed()) {
String queryText = queryTerms.size() == 1 ? queryTerms.get(0) : String.join(" ", queryTerms);
boolean fieldAutoGenPhraseQueries = ft instanceof TextField && ((TextField)ft).getAutoGeneratePhraseQueries();
- return newFieldQuery(getAnalyzer(), field, queryText, false, fieldAutoGenPhraseQueries);
+ boolean fieldEnableGraphQueries = ft instanceof TextField && ((TextField)ft).getEnableGraphQueries();
+ return newFieldQuery
+ (getAnalyzer(), field, queryText, false, fieldAutoGenPhraseQueries, fieldEnableGraphQueries);
} else {
if (raw) {
return new RawQuery(sf, queryTerms);
@@ -1025,7 +1034,7 @@ public abstract class SolrQueryParserBase extends QueryBuilder {
// default to a normal field query
String queryText = queryTerms.size() == 1 ? queryTerms.get(0) : String.join(" ", queryTerms);
- return newFieldQuery(getAnalyzer(), field, queryText, false, false);
+ return newFieldQuery(getAnalyzer(), field, queryText, false, false, true);
}
protected boolean isRangeShouldBeProtectedFromReverse(String field, String part1){
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbd22a6a/solr/core/src/java/org/apache/solr/schema/FieldType.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/FieldType.java b/solr/core/src/java/org/apache/solr/schema/FieldType.java
index 016e166..07eb866 100644
--- a/solr/core/src/java/org/apache/solr/schema/FieldType.java
+++ b/solr/core/src/java/org/apache/solr/schema/FieldType.java
@@ -833,7 +833,8 @@ public abstract class FieldType extends FieldProperties {
private static final String POSTINGS_FORMAT = "postingsFormat";
private static final String DOC_VALUES_FORMAT = "docValuesFormat";
- private static final String AUTO_GENERATE_PHRASE_QUERIES = "autoGeneratePhraseQueries";
+ protected static final String AUTO_GENERATE_PHRASE_QUERIES = "autoGeneratePhraseQueries";
+ protected static final String ENABLE_GRAPH_QUERIES = "enableGraphQueries";
private static final String ARGS = "args";
private static final String POSITION_INCREMENT_GAP = "positionIncrementGap";
@@ -856,6 +857,7 @@ public abstract class FieldType extends FieldProperties {
}
if (this instanceof TextField) {
namedPropertyValues.add(AUTO_GENERATE_PHRASE_QUERIES, ((TextField) this).getAutoGeneratePhraseQueries());
+ namedPropertyValues.add(ENABLE_GRAPH_QUERIES, ((TextField) this).getEnableGraphQueries());
}
namedPropertyValues.add(getPropertyName(INDEXED), hasProperty(INDEXED));
namedPropertyValues.add(getPropertyName(STORED), hasProperty(STORED));
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbd22a6a/solr/core/src/java/org/apache/solr/schema/TextField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/TextField.java b/solr/core/src/java/org/apache/solr/schema/TextField.java
index 3d34df3..d8bae24 100644
--- a/solr/core/src/java/org/apache/solr/schema/TextField.java
+++ b/solr/core/src/java/org/apache/solr/schema/TextField.java
@@ -41,6 +41,7 @@ import org.apache.solr.uninverting.UninvertingReader.Type;
*/
public class TextField extends FieldType {
protected boolean autoGeneratePhraseQueries;
+ protected boolean enableGraphQueries;
/**
* Analyzer set by schema for text types to use when searching fields
@@ -69,9 +70,15 @@ public class TextField extends FieldType {
} else {
autoGeneratePhraseQueries = true;
}
- String autoGeneratePhraseQueriesStr = args.remove("autoGeneratePhraseQueries");
+ String autoGeneratePhraseQueriesStr = args.remove(AUTO_GENERATE_PHRASE_QUERIES);
if (autoGeneratePhraseQueriesStr != null)
autoGeneratePhraseQueries = Boolean.parseBoolean(autoGeneratePhraseQueriesStr);
+
+ enableGraphQueries = true;
+ String enableGraphQueriesStr = args.remove(ENABLE_GRAPH_QUERIES);
+ if (enableGraphQueriesStr != null)
+ enableGraphQueries = Boolean.parseBoolean(enableGraphQueriesStr);
+
super.init(schema, args);
}
@@ -93,6 +100,10 @@ public class TextField extends FieldType {
public boolean getAutoGeneratePhraseQueries() {
return autoGeneratePhraseQueries;
}
+
+ public boolean getEnableGraphQueries() {
+ return enableGraphQueries;
+ }
@Override
public SortField getSortField(SchemaField field, boolean reverse) {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbd22a6a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
index 9825f72..07d7e51 100644
--- a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
@@ -1080,7 +1080,8 @@ public class ExtendedDismaxQParser extends QParser {
@Override
protected Query newFieldQuery(Analyzer analyzer, String field, String queryText,
- boolean quoted, boolean fieldAutoGenPhraseQueries) throws SyntaxError {
+ boolean quoted, boolean fieldAutoGenPhraseQueries, boolean enableGraphQueries)
+ throws SyntaxError {
Analyzer actualAnalyzer;
if (removeStopFilter) {
if (nonStopFilterAnalyzerPerField == null) {
@@ -1093,7 +1094,7 @@ public class ExtendedDismaxQParser extends QParser {
} else {
actualAnalyzer = parser.getReq().getSchema().getFieldType(field).getQueryAnalyzer();
}
- return super.newFieldQuery(actualAnalyzer, field, queryText, quoted, fieldAutoGenPhraseQueries);
+ return super.newFieldQuery(actualAnalyzer, field, queryText, quoted, fieldAutoGenPhraseQueries, enableGraphQueries);
}
@Override
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbd22a6a/solr/core/src/test-files/solr/collection1/conf/schema12.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema12.xml b/solr/core/src/test-files/solr/collection1/conf/schema12.xml
index 7ea770b..5a85c1f 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema12.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema12.xml
@@ -467,7 +467,14 @@
<fieldType name="currency" class="solr.CurrencyField" currencyConfig="currency.xml" multiValued="false"/>
-
+ <fieldType name="shingle23" class="solr.TextField" enableGraphQueries="false" multiValued="true">
+ <analyzer>
+ <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+ <filter class="solr.ShingleFilterFactory" minShingleSize="2" maxShingleSize="3"
+ tokenSeparator="_" outputUnigrams="false"/>
+ </analyzer>
+ </fieldType>
+
<field name="id" type="string" indexed="true" stored="true" multiValued="false" required="true"/>
<field name="signatureField" type="string" indexed="true" stored="false"/>
<field name="uuid" type="uuid" stored="true"/>
@@ -568,6 +575,8 @@
<field name="uniq2" type="string" indexed="true" stored="true" multiValued="true"/>
<field name="uniq3" type="string" indexed="true" stored="true"/>
<field name="nouniq" type="string" indexed="true" stored="true" multiValued="true"/>
+
+ <field name="shingle23" type="shingle23" indexed="true" stored="true"/>
<!--
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbd22a6a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
index c4d8bec..6c7b0cf 100644
--- a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
@@ -93,6 +93,7 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
assertU(adoc("id", "70", "text_sw", "hair"));
assertU(adoc("id", "71", "text_sw", "ties"));
assertU(adoc("id", "72", "text_sw", "wifi ATM"));
+ assertU(adoc("id", "73", "shingle23", "A B X D E"));
assertU(commit());
}
@@ -1946,4 +1947,21 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
}
}
}
+
+ @Test
+ public void testShingleQueries() throws Exception {
+ ModifiableSolrParams params = new ModifiableSolrParams();
+ params.add("sow", "false");
+ params.add("defType", "edismax");
+
+ try (SolrQueryRequest req = req(params)) {
+ QParser qParser = QParser.getParser("shingle23:(A B C)", req);
+ Query q = qParser.getQuery();
+ assertEquals("Synonym(shingle23:A_B shingle23:A_B_C) shingle23:B_C", q.toString());
+ }
+
+ assertJQ(req("df", "shingle23", "q", "A B C", "sow", "false")
+ , "/response/numFound==1"
+ );
+ }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dbd22a6a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java b/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
index 607f091..e1372d8 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
@@ -67,6 +67,8 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
assertU(adoc("id", "13", "eee_s", "'balance'", "rrr_s", "/leading_slash"));
assertU(adoc("id", "20", "syn", "wifi ATM"));
+
+ assertU(adoc("id", "30", "shingle23", "A B X D E"));
assertU(commit());
}
@@ -995,4 +997,20 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
}
}
}
+
+ @Test
+ public void testShingleQueries() throws Exception {
+ ModifiableSolrParams sowFalseParams = new ModifiableSolrParams();
+ sowFalseParams.add("sow", "false");
+
+ try (SolrQueryRequest req = req(sowFalseParams)) {
+ QParser qParser = QParser.getParser("shingle23:(A B C)", req);
+ Query q = qParser.getQuery();
+ assertEquals("Synonym(shingle23:A_B shingle23:A_B_C) shingle23:B_C", q.toString());
+ }
+
+ assertJQ(req("df", "shingle23", "q", "A B C", "sow", "false")
+ , "/response/numFound==1"
+ );
+ }
}
\ No newline at end of file
[06/12] lucene-solr:jira/solr-9959: LUCENE-7738: Fix min/max
verification bug in InetAddressRange to correctly compare IPv4 and IPv6.
Update tests.
Posted by ab...@apache.org.
LUCENE-7738: Fix min/max verification bug in InetAddressRange to correctly compare IPv4 and IPv6. Update tests.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/ef8126e5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/ef8126e5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/ef8126e5
Branch: refs/heads/jira/solr-9959
Commit: ef8126e5eab7aec9c8775c2e08bd6c2bb1ef690f
Parents: cb9f151
Author: Nicholas Knize <nk...@gmail.com>
Authored: Wed Apr 5 11:10:15 2017 -0500
Committer: Nicholas Knize <nk...@gmail.com>
Committed: Wed Apr 5 11:10:15 2017 -0500
----------------------------------------------------------------------
.../lucene/document/InetAddressRange.java | 14 ++-
.../search/TestInetAddressRangeQueries.java | 100 ++++++++-----------
2 files changed, 50 insertions(+), 64 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef8126e5/lucene/misc/src/java/org/apache/lucene/document/InetAddressRange.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/java/org/apache/lucene/document/InetAddressRange.java b/lucene/misc/src/java/org/apache/lucene/document/InetAddressRange.java
index 5fa1fb9..84f0d6b 100644
--- a/lucene/misc/src/java/org/apache/lucene/document/InetAddressRange.java
+++ b/lucene/misc/src/java/org/apache/lucene/document/InetAddressRange.java
@@ -68,9 +68,6 @@ public class InetAddressRange extends Field {
* @param max range max value; defined as an {@code InetAddress}
*/
public void setRangeValues(InetAddress min, InetAddress max) {
- if (StringHelper.compare(BYTES, min.getAddress(), 0, max.getAddress(), 0) > 0) {
- throw new IllegalArgumentException("min value cannot be greater than max value for range field (name=" + name + ")");
- }
final byte[] bytes;
if (fieldsData == null) {
bytes = new byte[BYTES*2];
@@ -83,8 +80,15 @@ public class InetAddressRange extends Field {
/** encode the min/max range into the provided byte array */
private static void encode(final InetAddress min, final InetAddress max, final byte[] bytes) {
- System.arraycopy(InetAddressPoint.encode(min), 0, bytes, 0, BYTES);
- System.arraycopy(InetAddressPoint.encode(max), 0, bytes, BYTES, BYTES);
+ // encode min and max value (consistent w/ InetAddressPoint encoding)
+ final byte[] minEncoded = InetAddressPoint.encode(min);
+ final byte[] maxEncoded = InetAddressPoint.encode(max);
+ // ensure min is lt max
+ if (StringHelper.compare(BYTES, minEncoded, 0, maxEncoded, 0) > 0) {
+ throw new IllegalArgumentException("min value cannot be greater than max value for InetAddressRange field");
+ }
+ System.arraycopy(minEncoded, 0, bytes, 0, BYTES);
+ System.arraycopy(maxEncoded, 0, bytes, BYTES, BYTES);
}
/** encode the min/max range and return the byte array */
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/ef8126e5/lucene/misc/src/test/org/apache/lucene/search/TestInetAddressRangeQueries.java
----------------------------------------------------------------------
diff --git a/lucene/misc/src/test/org/apache/lucene/search/TestInetAddressRangeQueries.java b/lucene/misc/src/test/org/apache/lucene/search/TestInetAddressRangeQueries.java
index e22cf9b..252162c 100644
--- a/lucene/misc/src/test/org/apache/lucene/search/TestInetAddressRangeQueries.java
+++ b/lucene/misc/src/test/org/apache/lucene/search/TestInetAddressRangeQueries.java
@@ -19,6 +19,7 @@ package org.apache.lucene.search;
import java.net.InetAddress;
import java.net.UnknownHostException;
+import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.document.InetAddressRange;
import org.apache.lucene.util.StringHelper;
@@ -28,16 +29,12 @@ import org.apache.lucene.util.StringHelper;
public class TestInetAddressRangeQueries extends BaseRangeFieldQueryTestCase {
private static final String FIELD_NAME = "ipRangeField";
- private IPVersion ipVersion;
-
- private enum IPVersion {IPv4, IPv6}
-
@Override
protected Range nextRange(int dimensions) throws Exception {
InetAddress min = nextInetaddress();
- byte[] bMin = min.getAddress();
+ byte[] bMin = InetAddressPoint.encode(min);
InetAddress max = nextInetaddress();
- byte[] bMax = max.getAddress();
+ byte[] bMax = InetAddressPoint.encode(max);
if (StringHelper.compare(bMin.length, bMin, 0, bMax, 0) > 0) {
return new IpRange(max, min);
}
@@ -46,89 +43,74 @@ public class TestInetAddressRangeQueries extends BaseRangeFieldQueryTestCase {
/** return random IPv4 or IPv6 address */
private InetAddress nextInetaddress() throws UnknownHostException {
- byte[] b;
- switch (ipVersion) {
- case IPv4:
- b = new byte[4];
- break;
- case IPv6:
- b = new byte[16];
- break;
- default:
- throw new IllegalArgumentException("incorrect IP version: " + ipVersion);
- }
+ byte[] b = random().nextBoolean() ? new byte[4] : new byte[16];
random().nextBytes(b);
return InetAddress.getByAddress(b);
}
- /** randomly select version across tests */
- private IPVersion ipVersion() {
- return random().nextBoolean() ? IPVersion.IPv4 : IPVersion.IPv6;
- }
-
@Override
public void testRandomTiny() throws Exception {
- ipVersion = ipVersion();
super.testRandomTiny();
}
@Override
public void testMultiValued() throws Exception {
- ipVersion = ipVersion();
super.testRandomMedium();
}
@Override
public void testRandomMedium() throws Exception {
- ipVersion = ipVersion();
super.testMultiValued();
}
@Nightly
@Override
public void testRandomBig() throws Exception {
- ipVersion = ipVersion();
super.testRandomBig();
}
/** return random range */
@Override
protected InetAddressRange newRangeField(Range r) {
- return new InetAddressRange(FIELD_NAME, ((IpRange)r).min, ((IpRange)r).max);
+ return new InetAddressRange(FIELD_NAME, ((IpRange)r).minAddress, ((IpRange)r).maxAddress);
}
/** return random intersects query */
@Override
protected Query newIntersectsQuery(Range r) {
- return InetAddressRange.newIntersectsQuery(FIELD_NAME, ((IpRange)r).min, ((IpRange)r).max);
+ return InetAddressRange.newIntersectsQuery(FIELD_NAME, ((IpRange)r).minAddress, ((IpRange)r).maxAddress);
}
/** return random contains query */
@Override
protected Query newContainsQuery(Range r) {
- return InetAddressRange.newContainsQuery(FIELD_NAME, ((IpRange)r).min, ((IpRange)r).max);
+ return InetAddressRange.newContainsQuery(FIELD_NAME, ((IpRange)r).minAddress, ((IpRange)r).maxAddress);
}
/** return random within query */
@Override
protected Query newWithinQuery(Range r) {
- return InetAddressRange.newWithinQuery(FIELD_NAME, ((IpRange)r).min, ((IpRange)r).max);
+ return InetAddressRange.newWithinQuery(FIELD_NAME, ((IpRange)r).minAddress, ((IpRange)r).maxAddress);
}
/** return random crosses query */
@Override
protected Query newCrossesQuery(Range r) {
- return InetAddressRange.newCrossesQuery(FIELD_NAME, ((IpRange)r).min, ((IpRange)r).max);
+ return InetAddressRange.newCrossesQuery(FIELD_NAME, ((IpRange)r).minAddress, ((IpRange)r).maxAddress);
}
/** encapsulated IpRange for test validation */
private class IpRange extends Range {
- InetAddress min;
- InetAddress max;
+ InetAddress minAddress;
+ InetAddress maxAddress;
+ byte[] min;
+ byte[] max;
IpRange(InetAddress min, InetAddress max) {
- this.min = min;
- this.max = max;
+ this.minAddress = min;
+ this.maxAddress = max;
+ this.min = InetAddressPoint.encode(min);
+ this.max = InetAddressPoint.encode(max);
}
@Override
@@ -138,33 +120,39 @@ public class TestInetAddressRangeQueries extends BaseRangeFieldQueryTestCase {
@Override
protected InetAddress getMin(int dim) {
- return min;
+ return minAddress;
}
@Override
protected void setMin(int dim, Object val) {
- byte[] v = ((InetAddress)val).getAddress();
+ InetAddress v = (InetAddress)val;
+ byte[] e = InetAddressPoint.encode(v);
- if (StringHelper.compare(v.length, min.getAddress(), 0, v, 0) < 0) {
- max = (InetAddress)val;
+ if (StringHelper.compare(e.length, min, 0, e, 0) < 0) {
+ max = e;
+ maxAddress = v;
} else {
- min = (InetAddress) val;
+ min = e;
+ minAddress = v;
}
}
@Override
protected InetAddress getMax(int dim) {
- return max;
+ return maxAddress;
}
@Override
protected void setMax(int dim, Object val) {
- byte[] v = ((InetAddress)val).getAddress();
+ InetAddress v = (InetAddress)val;
+ byte[] e = InetAddressPoint.encode(v);
- if (StringHelper.compare(v.length, max.getAddress(), 0, v, 0) > 0) {
- min = (InetAddress)val;
+ if (StringHelper.compare(e.length, max, 0, e, 0) > 0) {
+ min = e;
+ minAddress = v;
} else {
- max = (InetAddress) val;
+ max = e;
+ maxAddress = v;
}
}
@@ -177,37 +165,31 @@ public class TestInetAddressRangeQueries extends BaseRangeFieldQueryTestCase {
@Override
protected boolean isDisjoint(Range o) {
IpRange other = (IpRange)o;
- byte[] bMin = min.getAddress();
- byte[] bMax = max.getAddress();
- return StringHelper.compare(bMin.length, bMin, 0, other.max.getAddress(), 0) > 0 ||
- StringHelper.compare(bMax.length, bMax, 0, other.min.getAddress(), 0) < 0;
+ return StringHelper.compare(min.length, min, 0, other.max, 0) > 0 ||
+ StringHelper.compare(max.length, max, 0, other.min, 0) < 0;
}
@Override
protected boolean isWithin(Range o) {
IpRange other = (IpRange)o;
- byte[] bMin = min.getAddress();
- byte[] bMax = max.getAddress();
- return StringHelper.compare(bMin.length, bMin, 0, other.min.getAddress(), 0) >= 0 &&
- StringHelper.compare(bMax.length, bMax, 0, other.max.getAddress(), 0) <= 0;
+ return StringHelper.compare(min.length, min, 0, other.min, 0) >= 0 &&
+ StringHelper.compare(max.length, max, 0, other.max, 0) <= 0;
}
@Override
protected boolean contains(Range o) {
IpRange other = (IpRange)o;
- byte[] bMin = min.getAddress();
- byte[] bMax = max.getAddress();
- return StringHelper.compare(bMin.length, bMin, 0, other.min.getAddress(), 0) <= 0 &&
- StringHelper.compare(bMax.length, bMax, 0, other.max.getAddress(), 0) >= 0;
+ return StringHelper.compare(min.length, min, 0, other.min, 0) <= 0 &&
+ StringHelper.compare(max.length, max, 0, other.max, 0) >= 0;
}
@Override
public String toString() {
StringBuilder b = new StringBuilder();
b.append("Box(");
- b.append(min.getHostAddress());
+ b.append(minAddress.getHostAddress());
b.append(" TO ");
- b.append(max.getHostAddress());
+ b.append(maxAddress.getHostAddress());
b.append(")");
return b.toString();
}
[02/12] lucene-solr:jira/solr-9959: SOLR-10277: On 'downnode',
lots of wasteful mutations are done to ZK
Posted by ab...@apache.org.
SOLR-10277: On 'downnode', lots of wasteful mutations are done to ZK
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/60303028
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/60303028
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/60303028
Branch: refs/heads/jira/solr-9959
Commit: 60303028debf3927e0c3abfaaa4015f73b88e689
Parents: f08889f
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Wed Apr 5 16:01:44 2017 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Wed Apr 5 16:01:44 2017 +0530
----------------------------------------------------------------------
solr/CHANGES.txt | 3 +
.../java/org/apache/solr/cloud/Overseer.java | 2 +-
.../apache/solr/cloud/overseer/NodeMutator.java | 29 +-
.../solr/cloud/overseer/ZkWriteCommand.java | 5 +
.../apache/solr/cloud/ClusterStateMockUtil.java | 233 ++++++++++++++++
.../org/apache/solr/cloud/NodeMutatorTest.java | 95 +++++++
.../SharedFSAutoReplicaFailoverUtilsTest.java | 263 ++-----------------
7 files changed, 378 insertions(+), 252 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/60303028/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 4fa0353..3540315 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -207,6 +207,9 @@ Bug Fixes
* SOLR-10416: The JSON output of /admin/metrics is fixed to write the container as a
map (SimpleOrderedMap) instead of an array (NamedList). (shalin)
+* SOLR-10277: On 'downnode', lots of wasteful mutations are done to ZK.
+ (Joshua Humphries, Scott Blum, Varun Thacker, shalin)
+
================== 6.5.0 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/60303028/solr/core/src/java/org/apache/solr/cloud/Overseer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index 4d3cee7..f97fbac 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -383,7 +383,7 @@ public class Overseer implements Closeable {
}
break;
case DOWNNODE:
- return new NodeMutator(getZkStateReader()).downNode(clusterState, message);
+ return new NodeMutator().downNode(clusterState, message);
default:
throw new RuntimeException("unknown operation:" + operation + " contents:" + message.getProperties());
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/60303028/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java b/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java
index 0036fe1..55fd3ef 100644
--- a/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java
+++ b/solr/core/src/java/org/apache/solr/cloud/overseer/NodeMutator.java
@@ -19,7 +19,6 @@ package org.apache.solr.cloud.overseer;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -38,44 +37,44 @@ public class NodeMutator {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
- public NodeMutator(ZkStateReader zkStateReader) {
-
- }
-
public List<ZkWriteCommand> downNode(ClusterState clusterState, ZkNodeProps message) {
- List<ZkWriteCommand> zkWriteCommands = new ArrayList<ZkWriteCommand>();
+ List<ZkWriteCommand> zkWriteCommands = new ArrayList<>();
String nodeName = message.getStr(ZkStateReader.NODE_NAME_PROP);
log.debug("DownNode state invoked for node: " + nodeName);
Map<String, DocCollection> collections = clusterState.getCollectionsMap();
for (Map.Entry<String, DocCollection> entry : collections.entrySet()) {
+ String collection = entry.getKey();
DocCollection docCollection = entry.getValue();
+
Map<String,Slice> slicesCopy = new LinkedHashMap<>(docCollection.getSlicesMap());
- for (Entry<String,Slice> sliceEntry : slicesCopy.entrySet()) {
- Slice slice = docCollection.getSlice(sliceEntry.getKey());
- Map<String,Replica> newReplicas = new HashMap<String,Replica>();
+ boolean needToUpdateCollection = false;
+ for (Entry<String, Slice> sliceEntry : slicesCopy.entrySet()) {
+ Slice slice = sliceEntry.getValue();
+ Map<String, Replica> newReplicas = slice.getReplicasCopy();
Collection<Replica> replicas = slice.getReplicas();
for (Replica replica : replicas) {
- Map<String,Object> props = replica.shallowCopy();
String rNodeName = replica.getNodeName();
if (rNodeName.equals(nodeName)) {
log.debug("Update replica state for " + replica + " to " + Replica.State.DOWN.toString());
+ Map<String, Object> props = replica.shallowCopy();
props.put(ZkStateReader.STATE_PROP, Replica.State.DOWN.toString());
+ Replica newReplica = new Replica(replica.getName(), props);
+ newReplicas.put(replica.getName(), newReplica);
+ needToUpdateCollection = true;
}
-
- Replica newReplica = new Replica(replica.getName(), props);
- newReplicas.put(replica.getName(), newReplica);
}
Slice newSlice = new Slice(slice.getName(), newReplicas, slice.shallowCopy());
slicesCopy.put(slice.getName(), newSlice);
-
}
- zkWriteCommands.add(new ZkWriteCommand(entry.getKey(), docCollection.copyWithSlices(slicesCopy)));
+ if (needToUpdateCollection) {
+ zkWriteCommands.add(new ZkWriteCommand(collection, docCollection.copyWithSlices(slicesCopy)));
+ }
}
return zkWriteCommands;
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/60303028/solr/core/src/java/org/apache/solr/cloud/overseer/ZkWriteCommand.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/overseer/ZkWriteCommand.java b/solr/core/src/java/org/apache/solr/cloud/overseer/ZkWriteCommand.java
index 1697522..d464863 100644
--- a/solr/core/src/java/org/apache/solr/cloud/overseer/ZkWriteCommand.java
+++ b/solr/core/src/java/org/apache/solr/cloud/overseer/ZkWriteCommand.java
@@ -41,5 +41,10 @@ public class ZkWriteCommand {
public static ZkWriteCommand noop() {
return new ZkWriteCommand();
}
+
+ @Override
+ public String toString() {
+ return getClass().getSimpleName() + ": " + (noop ? "no-op" : name + "=" + collection);
+ }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/60303028/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java
new file mode 100644
index 0000000..e0cf3f7
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateMockUtil.java
@@ -0,0 +1,233 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.Utils;
+
+public class ClusterStateMockUtil {
+
+ private final static Pattern BLUEPRINT = Pattern.compile("([a-z])(\\d+)?(?:(['A','R','D','F']))?(\\*)?");
+
+ protected static class Result implements Closeable {
+ OverseerAutoReplicaFailoverThread.DownReplica badReplica;
+ ZkStateReader reader;
+
+ @Override
+ public void close() throws IOException {
+ reader.close();
+ }
+ }
+
+ protected static ClusterStateMockUtil.Result buildClusterState(List<Result> results, String string, String ... liveNodes) {
+ return buildClusterState(results, string, 1, liveNodes);
+ }
+
+ protected static ClusterStateMockUtil.Result buildClusterState(List<Result> results, String string, int replicationFactor, String ... liveNodes) {
+ return buildClusterState(results, string, replicationFactor, 10, liveNodes);
+ }
+
+ /**
+ * This method lets you construct a complex ClusterState object by using simple strings of letters.
+ *
+ * c = collection, s = slice, r = replica, \d = node number (r2 means the replica is on node 2),
+ * state = [A,R,D,F], * = replica to replace, binds to the left.
+ *
+ * For example:
+ * csrr2rD*sr2csr
+ *
+ * Creates:
+ *
+ * 'csrr2rD*'
+ * A collection, a shard, a replica on node 1 (the default) that is active (the default), a replica on node 2, and a replica on node 1
+ * that has a state of down and is the replica we will be looking to put somewhere else (the *).
+ *
+ * 'sr2'
+ * Then, another shard that has a replica on node 2.
+ *
+ * 'csr'
+ * Then, another collection that has a shard with a single active replica on node 1.
+ *
+ * Result:
+ * {
+ * "collection2":{
+ * "maxShardsPerNode":"1",
+ * "replicationFactor":"1",
+ * "shards":{"slice1":{
+ * "state":"active",
+ * "replicas":{"replica5":{
+ * "state":"active",
+ * "node_name":"baseUrl1_",
+ * "base_url":"http://baseUrl1"}}}}},
+ * "collection1":{
+ * "maxShardsPerNode":"1",
+ * "replicationFactor":"1",
+ * "shards":{
+ * "slice1":{
+ * "state":"active",
+ * "replicas":{
+ * "replica3 (bad)":{
+ * "state":"down",
+ * "node_name":"baseUrl1_",
+ * "base_url":"http://baseUrl1"},
+ * "replica2":{
+ * "state":"active",
+ * "node_name":"baseUrl2_",
+ * "base_url":"http://baseUrl2"},
+ * "replica1":{
+ * "state":"active",
+ * "node_name":"baseUrl1_",
+ * "base_url":"http://baseUrl1"}}},
+ * "slice2":{
+ * "state":"active",
+ * "replicas":{"replica4":{
+ * "state":"active",
+ * "node_name":"baseUrl2_",
+ * "base_url":"http://baseUrl2"}}}}}}
+ *
+ */
+ @SuppressWarnings("resource")
+ protected static ClusterStateMockUtil.Result buildClusterState(List<Result> results, String clusterDescription, int replicationFactor, int maxShardsPerNode, String ... liveNodes) {
+ ClusterStateMockUtil.Result result = new ClusterStateMockUtil.Result();
+
+ Map<String,Slice> slices = null;
+ Map<String,Replica> replicas = null;
+ Map<String,Object> collectionProps = new HashMap<>();
+ collectionProps.put(ZkStateReader.MAX_SHARDS_PER_NODE, Integer.toString(maxShardsPerNode));
+ collectionProps.put(ZkStateReader.REPLICATION_FACTOR, Integer.toString(replicationFactor));
+ Map<String,DocCollection> collectionStates = new HashMap<>();
+ DocCollection docCollection = null;
+ Slice slice = null;
+ int replicaCount = 1;
+
+ Matcher m = BLUEPRINT.matcher(clusterDescription);
+ while (m.find()) {
+ Replica replica;
+ switch (m.group(1)) {
+ case "c":
+ slices = new HashMap<>();
+ docCollection = new DocCollection("collection" + (collectionStates.size() + 1), slices, collectionProps, null);
+ collectionStates.put(docCollection.getName(), docCollection);
+ break;
+ case "s":
+ replicas = new HashMap<>();
+ slice = new Slice("slice" + (slices.size() + 1), replicas, null);
+ slices.put(slice.getName(), slice);
+ break;
+ case "r":
+ Map<String,Object> replicaPropMap = new HashMap<>();
+ String node;
+
+ node = m.group(2);
+
+ if (node == null || node.trim().length() == 0) {
+ node = "1";
+ }
+
+ Replica.State state = Replica.State.ACTIVE;
+ String stateCode = m.group(3);
+
+ if (stateCode != null) {
+ switch (stateCode.charAt(0)) {
+ case 'S':
+ state = Replica.State.ACTIVE;
+ break;
+ case 'R':
+ state = Replica.State.RECOVERING;
+ break;
+ case 'D':
+ state = Replica.State.DOWN;
+ break;
+ case 'F':
+ state = Replica.State.RECOVERY_FAILED;
+ break;
+ default:
+ throw new IllegalArgumentException(
+ "Unexpected state for replica: " + stateCode);
+ }
+ }
+
+ String nodeName = "baseUrl" + node + "_";
+ String replicaName = "replica" + replicaCount++;
+
+ if ("*".equals(m.group(4))) {
+ replicaName += " (bad)";
+ }
+
+ replicaPropMap.put(ZkStateReader.NODE_NAME_PROP, nodeName);
+ replicaPropMap.put(ZkStateReader.BASE_URL_PROP, "http://baseUrl" + node);
+ replicaPropMap.put(ZkStateReader.STATE_PROP, state.toString());
+
+ replica = new Replica(replicaName, replicaPropMap);
+
+ if ("*".equals(m.group(4))) {
+ result.badReplica = new OverseerAutoReplicaFailoverThread.DownReplica();
+ result.badReplica.replica = replica;
+ result.badReplica.slice = slice;
+ result.badReplica.collection = docCollection;
+ }
+
+ replicas.put(replica.getName(), replica);
+ break;
+ default:
+ break;
+ }
+ }
+
+ ClusterState clusterState = new ClusterState(1, new HashSet<>(Arrays.asList(liveNodes)), collectionStates);
+ MockZkStateReader reader = new MockZkStateReader(clusterState, collectionStates.keySet());
+
+ String json;
+ try {
+ json = new String(Utils.toJSON(clusterState), "UTF-8");
+ } catch (UnsupportedEncodingException e) {
+ throw new RuntimeException("Unexpected");
+ }
+ System.err.println(json);
+
+ // todo remove the limitation of always having a bad replica
+ assert result.badReplica != null : "Is there no bad replica?";
+ assert result.badReplica.slice != null : "Is there no bad replica?";
+
+ result.reader = reader;
+
+ if (results != null) {
+ results.add(result);
+ }
+
+ return result;
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/60303028/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java b/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java
new file mode 100644
index 0000000..ffa6ba2
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/NodeMutatorTest.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.solr.SolrTestCaseJ4Test;
+import org.apache.solr.cloud.overseer.NodeMutator;
+import org.apache.solr.cloud.overseer.ZkWriteCommand;
+import org.apache.solr.common.cloud.ClusterState;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.junit.Test;
+
+public class NodeMutatorTest extends SolrTestCaseJ4Test {
+
+ private static final String NODE3 = "baseUrl3_";
+ private static final String NODE3_URL = "http://baseUrl3";
+
+ private static final String NODE2 = "baseUrl2_";
+ private static final String NODE2_URL = "http://baseUrl2";
+
+ private static final String NODE1 = "baseUrl1_";
+ private static final String NODE1_URL = "http://baseUrl1";
+
+ @Test
+ public void downNodeReportsAllImpactedCollectionsAndNothingElse() throws IOException {
+ NodeMutator nm = new NodeMutator();
+ ZkNodeProps props = new ZkNodeProps(ZkStateReader.NODE_NAME_PROP, NODE1);
+
+ //We use 2 nodes with maxShardsPerNode as 1
+ //Collection1: 2 shards X 1 replica = replica1 on node1 and replica2 on node2
+ //Collection2: 1 shard X 1 replica = replica1 on node2
+ ClusterStateMockUtil.Result result = ClusterStateMockUtil.buildClusterState(null, "csrr2rD*csr2", 1, 1, NODE1, NODE2);
+ ClusterState clusterState = result.reader.getClusterState();
+ assertEquals(clusterState.getCollection("collection1").getReplica("replica1").getBaseUrl(), NODE1_URL);
+ assertEquals(clusterState.getCollection("collection1").getReplica("replica2").getBaseUrl(), NODE2_URL);
+ assertEquals(clusterState.getCollection("collection2").getReplica("replica4").getBaseUrl(), NODE2_URL);
+
+ props = new ZkNodeProps(ZkStateReader.NODE_NAME_PROP, NODE1);
+ List<ZkWriteCommand> writes = nm.downNode(clusterState, props);
+ assertEquals(writes.size(), 1);
+ assertEquals(writes.get(0).name, "collection1");
+ assertEquals(writes.get(0).collection.getReplica("replica1").getState(), Replica.State.DOWN);
+ assertEquals(writes.get(0).collection.getReplica("replica2").getState(), Replica.State.ACTIVE);
+ result.close();
+
+ //We use 3 nodes with maxShardsPerNode as 1
+ //Collection1: 2 shards X 1 replica = replica1 on node1 and replica2 on node2
+ //Collection2: 1 shard X 1 replica = replica1 on node2
+ //Collection3: 1 shard X 3 replica = replica1 on node1 , replica2 on node2, replica3 on node3
+ result = ClusterStateMockUtil.buildClusterState(null, "csrr2rD*csr2csr1r2r3", 1, 1, NODE1, NODE2, NODE3);
+ clusterState = result.reader.getClusterState();
+ assertEquals(clusterState.getCollection("collection1").getReplica("replica1").getBaseUrl(), NODE1_URL);
+ assertEquals(clusterState.getCollection("collection1").getReplica("replica2").getBaseUrl(), NODE2_URL);
+
+ assertEquals(clusterState.getCollection("collection2").getReplica("replica4").getBaseUrl(), NODE2_URL);
+
+ assertEquals(clusterState.getCollection("collection3").getReplica("replica5").getBaseUrl(), NODE1_URL);
+ assertEquals(clusterState.getCollection("collection3").getReplica("replica6").getBaseUrl(), NODE2_URL);
+ assertEquals(clusterState.getCollection("collection3").getReplica("replica7").getBaseUrl(), NODE3_URL);
+
+ writes = nm.downNode(clusterState, props);
+ assertEquals(writes.size(), 2);
+ for (ZkWriteCommand write : writes) {
+ if (write.name.equals("collection1")) {
+ assertEquals(write.collection.getReplica("replica1").getState(), Replica.State.DOWN);
+ assertEquals(write.collection.getReplica("replica2").getState(), Replica.State.ACTIVE);
+ } else if (write.name.equals("collection3")) {
+ assertEquals(write.collection.getReplica("replica5").getState(), Replica.State.DOWN);
+ assertEquals(write.collection.getReplica("replica6").getState(), Replica.State.ACTIVE);
+ assertEquals(write.collection.getReplica("replica7").getState(), Replica.State.ACTIVE);
+ } else {
+ fail("No other collection needs to be changed");
+ }
+ }
+ result.close();
+ }
+}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/60303028/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverUtilsTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverUtilsTest.java b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverUtilsTest.java
index f5fee21..3423420 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverUtilsTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverUtilsTest.java
@@ -16,30 +16,16 @@
*/
package org.apache.solr.cloud;
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.cloud.OverseerAutoReplicaFailoverThread.DownReplica;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.util.Utils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
+import static org.apache.solr.cloud.ClusterStateMockUtil.buildClusterState;
+
public class SharedFSAutoReplicaFailoverUtilsTest extends SolrTestCaseJ4 {
private static final String NODE6 = "baseUrl6_";
private static final String NODE6_URL = "http://baseUrl6";
@@ -58,12 +44,8 @@ public class SharedFSAutoReplicaFailoverUtilsTest extends SolrTestCaseJ4 {
private static final String NODE1 = "baseUrl1_";
private static final String NODE1_URL = "http://baseUrl1";
-
- private final static Pattern BLUEPRINT = Pattern.compile("([a-z])(\\d+)?(?:(['A','R','D','F']))?(\\*)?");
-
- private int buildNumber = 1;
- private List<Result> results;
+ private List<ClusterStateMockUtil.Result> results;
@Before
public void setUp() throws Exception {
@@ -74,61 +56,50 @@ public class SharedFSAutoReplicaFailoverUtilsTest extends SolrTestCaseJ4 {
@After
public void tearDown() throws Exception {
super.tearDown();
- for (Result result : results) {
+ for (ClusterStateMockUtil.Result result : results) {
result.close();
}
}
@Test
public void testGetBestCreateUrlBasics() {
- Result result = buildClusterState("csr1R*r2", NODE1);
+ ClusterStateMockUtil.Result result = buildClusterState(results, "csr1R*r2", NODE1);
String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertNull("Should be no live node to failover to", createUrl);
- result = buildClusterState("csr1R*r2", NODE1, NODE2);
+ result = buildClusterState(results, "csr1R*r2", NODE1, NODE2);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertNull("Only failover candidate node already has a replica", createUrl);
- result = buildClusterState("csr1R*r2sr3", NODE1, NODE2, NODE3);
+ result = buildClusterState(results, "csr1R*r2sr3", NODE1, NODE2, NODE3);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertEquals("Node3 does not have a replica from the bad slice and should be the best choice", NODE3_URL, createUrl);
- result = buildClusterState("csr1R*r2Fsr3r4r5", NODE1, NODE2, NODE3);
+ result = buildClusterState(results, "csr1R*r2Fsr3r4r5", NODE1, NODE2, NODE3);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertTrue(createUrl.equals(NODE3_URL));
- result = buildClusterState("csr1*r2r3sr3r3sr4", NODE1, NODE2, NODE3, NODE4);
+ result = buildClusterState(results, "csr1*r2r3sr3r3sr4", NODE1, NODE2, NODE3, NODE4);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertEquals(NODE4_URL, createUrl);
- result = buildClusterState("csr1*r2sr3r3sr4sr4", NODE1, NODE2, NODE3, NODE4);
+ result = buildClusterState(results, "csr1*r2sr3r3sr4sr4", NODE1, NODE2, NODE3, NODE4);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertTrue(createUrl.equals(NODE3_URL) || createUrl.equals(NODE4_URL));
}
-
-
- private static class Result implements Closeable {
- DownReplica badReplica;
- ZkStateReader reader;
-
- @Override
- public void close() throws IOException {
- reader.close();
- }
- }
@Test
public void testGetBestCreateUrlMultipleCollections() throws Exception {
- Result result = buildClusterState("csr*r2csr2", NODE1);
+ ClusterStateMockUtil.Result result = buildClusterState(results, "csr*r2csr2", NODE1);
String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertNull(createUrl);
- result = buildClusterState("csr*r2csr2", NODE1);
+ result = buildClusterState(results, "csr*r2csr2", NODE1);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertNull(createUrl);
- result = buildClusterState("csr*r2csr2", NODE1, NODE2);
+ result = buildClusterState(results, "csr*r2csr2", NODE1, NODE2);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertNull(createUrl);
}
@@ -136,11 +107,11 @@ public class SharedFSAutoReplicaFailoverUtilsTest extends SolrTestCaseJ4 {
@Test
public void testGetBestCreateUrlMultipleCollections2() {
- Result result = buildClusterState("csr*r2sr3cr2", NODE1);
+ ClusterStateMockUtil.Result result = buildClusterState(results, "csr*r2sr3cr2", NODE1);
String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertNull(createUrl);
- result = buildClusterState("csr*r2sr3cr2", NODE1, NODE2, NODE3);
+ result = buildClusterState(results, "csr*r2sr3cr2", NODE1, NODE2, NODE3);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertEquals(NODE3_URL, createUrl);
}
@@ -148,253 +119,73 @@ public class SharedFSAutoReplicaFailoverUtilsTest extends SolrTestCaseJ4 {
@Test
public void testGetBestCreateUrlMultipleCollections3() {
- Result result = buildClusterState("csr5r1sr4r2sr3r6csr2*r6sr5r3sr4r3", NODE1, NODE4, NODE5, NODE6);
+ ClusterStateMockUtil.Result result = buildClusterState(results, "csr5r1sr4r2sr3r6csr2*r6sr5r3sr4r3", NODE1, NODE4, NODE5, NODE6);
String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertEquals(NODE1_URL, createUrl);
}
@Test
public void testGetBestCreateUrlMultipleCollections4() {
- Result result = buildClusterState("csr1r4sr3r5sr2r6csr5r6sr4r6sr5*r4", NODE6);
+ ClusterStateMockUtil.Result result = buildClusterState(results, "csr1r4sr3r5sr2r6csr5r6sr4r6sr5*r4", NODE6);
String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertEquals(NODE6_URL, createUrl);
}
@Test
public void testFailOverToEmptySolrInstance() {
- Result result = buildClusterState("csr1*r1sr1csr1", NODE2);
+ ClusterStateMockUtil.Result result = buildClusterState(results, "csr1*r1sr1csr1", NODE2);
String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertEquals(NODE2_URL, createUrl);
}
@Test
public void testFavorForeignSlices() {
- Result result = buildClusterState("csr*sr2csr3r3", NODE2, NODE3);
+ ClusterStateMockUtil.Result result = buildClusterState(results, "csr*sr2csr3r3", NODE2, NODE3);
String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertEquals(NODE3_URL, createUrl);
- result = buildClusterState("csr*sr2csr3r3r3r3r3r3r3", NODE2, NODE3);
+ result = buildClusterState(results, "csr*sr2csr3r3r3r3r3r3r3", NODE2, NODE3);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertEquals(NODE2_URL, createUrl);
}
@Test
public void testCollectionMaxNodesPerShard() {
- Result result = buildClusterState("csr*sr2", 1, 1, NODE2);
+ ClusterStateMockUtil.Result result = buildClusterState(results, "csr*sr2", 1, 1, NODE2);
String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertNull(createUrl);
- result = buildClusterState("csr*sr2", 1, 2, NODE2);
+ result = buildClusterState(results, "csr*sr2", 1, 2, NODE2);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertEquals(NODE2_URL, createUrl);
- result = buildClusterState("csr*csr2r2", 1, 1, NODE2);
+ result = buildClusterState(results, "csr*csr2r2", 1, 1, NODE2);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, null);
assertEquals(NODE2_URL, createUrl);
}
@Test
public void testMaxCoresPerNode() {
- Result result = buildClusterState("csr*sr2", 1, 1, NODE2);
+ ClusterStateMockUtil.Result result = buildClusterState(results, "csr*sr2", 1, 1, NODE2);
String createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 1);
assertNull(createUrl);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 2);
assertNull(createUrl);
- result = buildClusterState("csr*sr2", 1, 2, NODE2);
+ result = buildClusterState(results, "csr*sr2", 1, 2, NODE2);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 2);
assertEquals(NODE2_URL, createUrl);
- result = buildClusterState("csr*sr2sr3sr4", 1, 1, NODE2, NODE3, NODE4);
+ result = buildClusterState(results, "csr*sr2sr3sr4", 1, 1, NODE2, NODE3, NODE4);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 1);
assertNull(createUrl);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 2);
assertNull(createUrl);
- result = buildClusterState("csr*sr2sr3sr4", 1, 2, NODE2, NODE3, NODE4);
+ result = buildClusterState(results, "csr*sr2sr3sr4", 1, 2, NODE2, NODE3, NODE4);
createUrl = OverseerAutoReplicaFailoverThread.getBestCreateUrl(result.reader, result.badReplica, 2);
assertTrue(createUrl.equals(NODE3_URL) || createUrl.equals(NODE4_URL));
}
-
- private Result buildClusterState(String string, String ... liveNodes) {
- return buildClusterState(string, 1, liveNodes);
- }
-
- private Result buildClusterState(String string, int replicationFactor, String ... liveNodes) {
- return buildClusterState(string, replicationFactor, 10, liveNodes);
- }
-
- /**
- * This method lets you construct a complex ClusterState object by using simple strings of letters.
- *
- * c = collection, s = slice, r = replica, \d = node number (r2 means the replica is on node 2),
- * state = [A,R,D,F], * = replica to replace, binds to the left.
- *
- * For example:
- * csrr2rD*sr2csr
- *
- * Creates:
- *
- * 'csrr2rD*'
- * A collection, a shard, a replica on node 1 (the default) that is active (the default), a replica on node 2, and a replica on node 1
- * that has a state of down and is the replica we will be looking to put somewhere else (the *).
- *
- * 'sr2'
- * Then, another shard that has a replica on node 2.
- *
- * 'csr'
- * Then, another collection that has a shard with a single active replica on node 1.
- *
- * Result:
- * {
- * "collection2":{
- * "maxShardsPerNode":"1",
- * "replicationFactor":"1",
- * "shards":{"slice1":{
- * "state":"active",
- * "replicas":{"replica5":{
- * "state":"active",
- * "node_name":"baseUrl1_",
- * "base_url":"http://baseUrl1"}}}}},
- * "collection1":{
- * "maxShardsPerNode":"1",
- * "replicationFactor":"1",
- * "shards":{
- * "slice1":{
- * "state":"active",
- * "replicas":{
- * "replica3 (bad)":{
- * "state":"down",
- * "node_name":"baseUrl1_",
- * "base_url":"http://baseUrl1"},
- * "replica2":{
- * "state":"active",
- * "node_name":"baseUrl2_",
- * "base_url":"http://baseUrl2"},
- * "replica1":{
- * "state":"active",
- * "node_name":"baseUrl1_",
- * "base_url":"http://baseUrl1"}}},
- * "slice2":{
- * "state":"active",
- * "replicas":{"replica4":{
- * "state":"active",
- * "node_name":"baseUrl2_",
- * "base_url":"http://baseUrl2"}}}}}}
- *
- */
- @SuppressWarnings("resource")
- private Result buildClusterState(String clusterDescription, int replicationFactor, int maxShardsPerNode, String ... liveNodes) {
- Result result = new Result();
-
- Map<String,Slice> slices = null;
- Map<String,Replica> replicas = null;
- Map<String,Object> collectionProps = new HashMap<>();
- collectionProps.put(ZkStateReader.MAX_SHARDS_PER_NODE, Integer.toString(maxShardsPerNode));
- collectionProps.put(ZkStateReader.REPLICATION_FACTOR, Integer.toString(replicationFactor));
- Map<String,DocCollection> collectionStates = new HashMap<>();
- DocCollection docCollection = null;
- Slice slice = null;
- int replicaCount = 1;
-
- Matcher m = BLUEPRINT.matcher(clusterDescription);
- while (m.find()) {
- Replica replica;
- switch (m.group(1)) {
- case "c":
- slices = new HashMap<>();
- docCollection = new DocCollection("collection" + (collectionStates.size() + 1), slices, collectionProps, null);
- collectionStates.put(docCollection.getName(), docCollection);
- break;
- case "s":
- replicas = new HashMap<>();
- slice = new Slice("slice" + (slices.size() + 1), replicas, null);
- slices.put(slice.getName(), slice);
- break;
- case "r":
- Map<String,Object> replicaPropMap = new HashMap<>();
- String node;
-
- node = m.group(2);
-
- if (node == null || node.trim().length() == 0) {
- node = "1";
- }
-
- Replica.State state = Replica.State.ACTIVE;
- String stateCode = m.group(3);
-
- if (stateCode != null) {
- switch (stateCode.charAt(0)) {
- case 'S':
- state = Replica.State.ACTIVE;
- break;
- case 'R':
- state = Replica.State.RECOVERING;
- break;
- case 'D':
- state = Replica.State.DOWN;
- break;
- case 'F':
- state = Replica.State.RECOVERY_FAILED;
- break;
- default:
- throw new IllegalArgumentException(
- "Unexpected state for replica: " + stateCode);
- }
- }
-
- String nodeName = "baseUrl" + node + "_";
- String replicaName = "replica" + replicaCount++;
-
- if ("*".equals(m.group(4))) {
- replicaName += " (bad)";
- }
-
- replicaPropMap.put(ZkStateReader.NODE_NAME_PROP, nodeName);
- replicaPropMap.put(ZkStateReader.BASE_URL_PROP, "http://baseUrl" + node);
- replicaPropMap.put(ZkStateReader.STATE_PROP, state.toString());
-
- replica = new Replica(replicaName, replicaPropMap);
-
- if ("*".equals(m.group(4))) {
- result.badReplica = new DownReplica();
- result.badReplica.replica = replica;
- result.badReplica.slice = slice;
- result.badReplica.collection = docCollection;
- }
-
- replicas.put(replica.getName(), replica);
- break;
- default:
- break;
- }
- }
-
- // trunk briefly had clusterstate taking a zkreader :( this was required to work around that - leaving
- // until that issue is resolved.
- MockZkStateReader reader = new MockZkStateReader(null, collectionStates.keySet());
- ClusterState clusterState = new ClusterState(1, new HashSet<>(Arrays.asList(liveNodes)), collectionStates);
- reader = new MockZkStateReader(clusterState, collectionStates.keySet());
-
- String json;
- try {
- json = new String(Utils.toJSON(clusterState), "UTF-8");
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException("Unexpected");
- }
- System.err.println("build:" + buildNumber++);
- System.err.println(json);
-
- assert result.badReplica != null : "Is there no bad replica?";
- assert result.badReplica.slice != null : "Is there no bad replica?";
-
- result.reader = reader;
-
- results.add(result);
-
- return result;
- }
}
[04/12] lucene-solr:jira/solr-9959: Remove unused (private static
final) loggers in LTRQParserPlugin and LTRFeatureLoggerTransformerFactory.
Posted by ab...@apache.org.
Remove unused (private static final) loggers in LTRQParserPlugin and LTRFeatureLoggerTransformerFactory.
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/50ed7294
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/50ed7294
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/50ed7294
Branch: refs/heads/jira/solr-9959
Commit: 50ed729459a55360c6179191babb0ee93b91b632
Parents: dfa342b
Author: Christine Poerschke <cp...@apache.org>
Authored: Wed Apr 5 11:52:34 2017 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Wed Apr 5 11:52:34 2017 +0100
----------------------------------------------------------------------
.../response/transform/LTRFeatureLoggerTransformerFactory.java | 5 -----
.../src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java | 5 -----
2 files changed, 10 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/50ed7294/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java
index 9585a7f..beb5ddf 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/response/transform/LTRFeatureLoggerTransformerFactory.java
@@ -17,7 +17,6 @@
package org.apache.solr.ltr.response.transform;
import java.io.IOException;
-import java.lang.invoke.MethodHandles;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
@@ -47,8 +46,6 @@ import org.apache.solr.response.transform.DocTransformer;
import org.apache.solr.response.transform.TransformerFactory;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.SolrPluginUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* This transformer will take care to generate and append in the response the
@@ -65,8 +62,6 @@ import org.slf4j.LoggerFactory;
public class LTRFeatureLoggerTransformerFactory extends TransformerFactory {
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
// used inside fl to specify the format (dense|sparse) of the extracted features
private static final String FV_FORMAT = "format";
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/50ed7294/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java
index 2b4d570..c5db963 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/search/LTRQParserPlugin.java
@@ -17,7 +17,6 @@
package org.apache.solr.ltr.search;
import java.io.IOException;
-import java.lang.invoke.MethodHandles;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
@@ -46,8 +45,6 @@ import org.apache.solr.search.QParserPlugin;
import org.apache.solr.search.RankQuery;
import org.apache.solr.search.SyntaxError;
import org.apache.solr.util.SolrPluginUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Plug into solr a rerank model.
@@ -60,8 +57,6 @@ public class LTRQParserPlugin extends QParserPlugin implements ResourceLoaderAwa
public static final String NAME = "ltr";
private static Query defaultQuery = new MatchAllDocsQuery();
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
// params for setting custom external info that features can use, like query
// intent
static final String EXTERNAL_FEATURE_INFO = "efi.";