Posted to commits@lucene.apache.org by ab...@apache.org on 2018/09/27 07:23:44 UTC
lucene-solr:jira/solr-12709: Fix precommit. Rename the test to better reflect what it's doing.
Repository: lucene-solr
Updated Branches:
refs/heads/jira/solr-12709 359829fbf -> c5c101904
Fix precommit. Rename the test to better reflect what it's doing.
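
(Context, not part of the commit message: the precommit failure here is most likely the forbidden-apis check, which rejects locale-sensitive calls such as String.format(String, Object...); the diff below passes an explicit Locale.ROOT to each such call in SimClusterStateProvider and in the renamed test. A minimal sketch of the pattern — class name and values are illustrative only, not taken from the commit:

import java.util.Locale;

public class LocaleRootFormatSketch {
  public static void main(String[] args) {
    long totalDocs = 1_234_567L;

    // Locale-sensitive form: the grouping separator ("," vs ".") depends on the
    // JVM's default locale, which is why such calls tend to fail precommit:
    //   String.format("%,d", totalDocs);

    // Locale-independent form, matching the change applied throughout the diff:
    String formatted = String.format(Locale.ROOT, "%,d", totalDocs);
    System.out.println(formatted); // prints 1,234,567 regardless of default locale
  }
}

The same substitution applies to the "%,.0f" averages in the diff; only the format string differs.)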
Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c5c10190
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c5c10190
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c5c10190
Branch: refs/heads/jira/solr-12709
Commit: c5c101904bdd6eb03d802e748ef41beae421f10e
Parents: 359829f
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Wed Sep 26 19:18:06 2018 +0200
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Wed Sep 26 19:18:06 2018 +0200
----------------------------------------------------------------------
.../sim/SimClusterStateProvider.java | 36 ++---
.../autoscaling/sim/TestSimAutoScaling.java | 134 ----------------
.../autoscaling/sim/TestSimExtremeIndexing.java | 151 +++++++++++++++++++
3 files changed, 169 insertions(+), 152 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c5c10190/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
index f63a844..d8668e3 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
@@ -1946,24 +1946,24 @@ public class SimClusterStateProvider implements ClusterStateProvider {
perColl.put(" activeReplicas", activeReplicas);
perColl.put(" inactiveReplicas", totalReplicas - activeReplicas);
long totalDocs = (long)docs.getSum() + bufferedDocs;
- perColl.put("totalActiveDocs", String.format("%,d", totalDocs));
- perColl.put(" bufferedDocs", String.format("%,d", bufferedDocs));
- perColl.put(" maxActiveSliceDocs", String.format("%,d", (long)docs.getMax()));
- perColl.put(" minActiveSliceDocs", String.format("%,d", (long)docs.getMin()));
- perColl.put(" avgActiveSliceDocs", String.format("%,.0f", docs.getMean()));
- perColl.put("totalInactiveDocs", String.format("%,d", (long)inactiveDocs.getSum()));
- perColl.put(" maxInactiveSliceDocs", String.format("%,d", (long)inactiveDocs.getMax()));
- perColl.put(" minInactiveSliceDocs", String.format("%,d", (long)inactiveDocs.getMin()));
- perColl.put(" avgInactiveSliceDocs", String.format("%,.0f", inactiveDocs.getMean()));
- perColl.put("totalActiveBytes", String.format("%,d", (long)bytes.getSum()));
- perColl.put(" maxActiveSliceBytes", String.format("%,d", (long)bytes.getMax()));
- perColl.put(" minActiveSliceBytes", String.format("%,d", (long)bytes.getMin()));
- perColl.put(" avgActiveSliceBytes", String.format("%,.0f", bytes.getMean()));
- perColl.put("totalInactiveBytes", String.format("%,d", (long)inactiveBytes.getSum()));
- perColl.put(" maxInactiveSliceBytes", String.format("%,d", (long)inactiveBytes.getMax()));
- perColl.put(" minInactiveSliceBytes", String.format("%,d", (long)inactiveBytes.getMin()));
- perColl.put(" avgInactiveSliceBytes", String.format("%,.0f", inactiveBytes.getMean()));
- perColl.put("totalActiveDeletedDocs", String.format("%,d", deletedDocs));
+ perColl.put("totalActiveDocs", String.format(Locale.ROOT, "%,d", totalDocs));
+ perColl.put(" bufferedDocs", String.format(Locale.ROOT, "%,d", bufferedDocs));
+ perColl.put(" maxActiveSliceDocs", String.format(Locale.ROOT, "%,d", (long)docs.getMax()));
+ perColl.put(" minActiveSliceDocs", String.format(Locale.ROOT, "%,d", (long)docs.getMin()));
+ perColl.put(" avgActiveSliceDocs", String.format(Locale.ROOT, "%,.0f", docs.getMean()));
+ perColl.put("totalInactiveDocs", String.format(Locale.ROOT, "%,d", (long)inactiveDocs.getSum()));
+ perColl.put(" maxInactiveSliceDocs", String.format(Locale.ROOT, "%,d", (long)inactiveDocs.getMax()));
+ perColl.put(" minInactiveSliceDocs", String.format(Locale.ROOT, "%,d", (long)inactiveDocs.getMin()));
+ perColl.put(" avgInactiveSliceDocs", String.format(Locale.ROOT, "%,.0f", inactiveDocs.getMean()));
+ perColl.put("totalActiveBytes", String.format(Locale.ROOT, "%,d", (long)bytes.getSum()));
+ perColl.put(" maxActiveSliceBytes", String.format(Locale.ROOT, "%,d", (long)bytes.getMax()));
+ perColl.put(" minActiveSliceBytes", String.format(Locale.ROOT, "%,d", (long)bytes.getMin()));
+ perColl.put(" avgActiveSliceBytes", String.format(Locale.ROOT, "%,.0f", bytes.getMean()));
+ perColl.put("totalInactiveBytes", String.format(Locale.ROOT, "%,d", (long)inactiveBytes.getSum()));
+ perColl.put(" maxInactiveSliceBytes", String.format(Locale.ROOT, "%,d", (long)inactiveBytes.getMax()));
+ perColl.put(" minInactiveSliceBytes", String.format(Locale.ROOT, "%,d", (long)inactiveBytes.getMin()));
+ perColl.put(" avgInactiveSliceBytes", String.format(Locale.ROOT, "%,.0f", inactiveBytes.getMean()));
+ perColl.put("totalActiveDeletedDocs", String.format(Locale.ROOT, "%,d", deletedDocs));
});
return stats;
} finally {
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c5c10190/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimAutoScaling.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimAutoScaling.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimAutoScaling.java
deleted file mode 100644
index 6744955..0000000
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimAutoScaling.java
+++ /dev/null
@@ -1,134 +0,0 @@
-package org.apache.solr.cloud.autoscaling.sim;
-
-import java.lang.invoke.MethodHandles;
-import java.util.Iterator;
-
-import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
-import org.apache.solr.client.solrj.SolrClient;
-import org.apache.solr.client.solrj.SolrRequest;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.request.UpdateRequest;
-import org.apache.solr.cloud.CloudTestUtils;
-import org.apache.solr.cloud.autoscaling.ExecutePlanAction;
-import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.SolrInputField;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.TimeSource;
-import org.apache.solr.util.LogLevel;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static org.apache.solr.cloud.autoscaling.AutoScalingHandlerTest.createAutoScalingRequest;
-
-/**
- *
- */
-@Ignore("not ready yet")
-@TimeoutSuite(millis = 48 * 3600 * 1000)
-@LogLevel("org.apache.solr.cloud.autoscaling=DEBUG;org.apache.solr.cloud.autoscaling.NodeLostTrigger=INFO;org.apache.client.solrj.cloud.autoscaling=DEBUG;org.apache.solr.cloud.autoscaling.ComputePlanAction=INFO;org.apache.solr.cloud.autoscaling.ExecutePlanAction=DEBUG;org.apache.solr.cloud.autoscaling.ScheduledTriggers=DEBUG")
-//@LogLevel("org.apache.solr.cloud.autoscaling=DEBUG;org.apache.solr.cloud.autoscaling.NodeLostTrigger=INFO;org.apache.client.solrj.cloud.autoscaling=DEBUG;org.apache.solr.cloud.CloudTestUtils=TRACE")
-public class TestSimAutoScaling extends SimSolrCloudTestCase {
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
- private static final int SPEED = 500;
- private static final int NUM_NODES = 200;
-
- private static final long BATCH_SIZE = 200000;
- private static final long NUM_BATCHES = 5000000;
- private static final long ABOVE_SIZE = 20000000;
-
-
- private static TimeSource timeSource;
- private static SolrClient solrClient;
-
- @BeforeClass
- public static void setupCluster() throws Exception {
- configureCluster(NUM_NODES, TimeSource.get("simTime:" + SPEED));
- timeSource = cluster.getTimeSource();
- solrClient = cluster.simGetSolrClient();
- cluster.simSetUseSystemCollection(false);
- }
-
- @AfterClass
- public static void tearDownCluster() throws Exception {
- solrClient = null;
- }
-
- @Test
- public void testScaleUp() throws Exception {
- String collectionName = "testScaleUp_collection";
- CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName,
- "conf", 2, 2).setMaxShardsPerNode(10);
- create.process(solrClient);
- CloudTestUtils.waitForState(cluster, "failed to create " + collectionName, collectionName,
- CloudTestUtils.clusterShape(2, 2, false, true));
-
- //long waitForSeconds = 3 + random().nextInt(5);
- long waitForSeconds = 1;
- String setTriggerCommand = "{" +
- "'set-trigger' : {" +
- "'name' : 'scaleUpTrigger'," +
- "'event' : 'indexSize'," +
- "'waitFor' : '" + waitForSeconds + "s'," +
- "'aboveDocs' : " + ABOVE_SIZE + "," +
- "'enabled' : true," +
- "'actions' : [{'name' : 'compute_plan', 'class' : 'solr.ComputePlanAction'}," +
- "{'name' : 'execute_plan', 'class' : '" + ExecutePlanAction.class.getName() + "'}]" +
- "}}";
- SolrRequest req = createAutoScalingRequest(SolrRequest.METHOD.POST, setTriggerCommand);
- NamedList<Object> response = solrClient.request(req);
- assertEquals(response.get("result").toString(), "success");
-
- long batchSize = BATCH_SIZE;
- for (long i = 0; i < NUM_BATCHES; i++) {
- addDocs(collectionName, i * batchSize, batchSize);
- log.info(String.format("#### Total docs so far: %,d", ((i + 1) * batchSize)));
- timeSource.sleep(waitForSeconds);
- }
- timeSource.sleep(60000);
- }
-
- private void addDocs(String collection, long start, long count) throws Exception {
- UpdateRequest ureq = new UpdateRequest();
- ureq.setParam("collection", collection);
- ureq.setDocIterator(new FakeDocIterator(start, count));
- solrClient.request(ureq);
- }
-
- // lightweight generator of fake documents
- // NOTE: this iterator only ever returns the same document, which works ok
- // for our "index update" simulation. Obviously don't use this for real indexing.
- private static class FakeDocIterator implements Iterator<SolrInputDocument> {
- final SolrInputDocument doc = new SolrInputDocument();
- final SolrInputField idField = new SolrInputField("id");
-
- final long start, count;
-
- long current, max;
-
- FakeDocIterator(long start, long count) {
- this.start = start;
- this.count = count;
- current = start;
- max = start + count;
- doc.put("id", idField);
- idField.setValue("foo");
- }
-
- @Override
- public boolean hasNext() {
- return current < max;
- }
-
- @Override
- public SolrInputDocument next() {
- current++;
- return doc;
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c5c10190/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java
new file mode 100644
index 0000000..00dacfb
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimExtremeIndexing.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud.autoscaling.sim;
+
+import java.lang.invoke.MethodHandles;
+import java.util.Iterator;
+import java.util.Locale;
+
+import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.cloud.CloudTestUtils;
+import org.apache.solr.cloud.autoscaling.ExecutePlanAction;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.SolrInputField;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.TimeSource;
+import org.apache.solr.util.LogLevel;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.cloud.autoscaling.AutoScalingHandlerTest.createAutoScalingRequest;
+
+/**
+ *
+ */
+@Ignore("not ready yet")
+@TimeoutSuite(millis = 48 * 3600 * 1000)
+@LogLevel("org.apache.solr.cloud.autoscaling=DEBUG;org.apache.solr.cloud.autoscaling.NodeLostTrigger=INFO;org.apache.client.solrj.cloud.autoscaling=DEBUG;org.apache.solr.cloud.autoscaling.ComputePlanAction=INFO;org.apache.solr.cloud.autoscaling.ExecutePlanAction=DEBUG;org.apache.solr.cloud.autoscaling.ScheduledTriggers=DEBUG")
+//@LogLevel("org.apache.solr.cloud.autoscaling=DEBUG;org.apache.solr.cloud.autoscaling.NodeLostTrigger=INFO;org.apache.client.solrj.cloud.autoscaling=DEBUG;org.apache.solr.cloud.CloudTestUtils=TRACE")
+public class TestSimExtremeIndexing extends SimSolrCloudTestCase {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ private static final int SPEED = 500;
+ private static final int NUM_NODES = 200;
+
+ private static final long BATCH_SIZE = 200000;
+ private static final long NUM_BATCHES = 5000000;
+ private static final long ABOVE_SIZE = 20000000;
+
+
+ private static TimeSource timeSource;
+ private static SolrClient solrClient;
+
+ @BeforeClass
+ public static void setupCluster() throws Exception {
+ configureCluster(NUM_NODES, TimeSource.get("simTime:" + SPEED));
+ timeSource = cluster.getTimeSource();
+ solrClient = cluster.simGetSolrClient();
+ cluster.simSetUseSystemCollection(false);
+ }
+
+ @AfterClass
+ public static void tearDownCluster() throws Exception {
+ solrClient = null;
+ }
+
+ @Test
+ public void testScaleUp() throws Exception {
+ String collectionName = "testScaleUp_collection";
+ CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName,
+ "conf", 2, 2).setMaxShardsPerNode(10);
+ create.process(solrClient);
+ CloudTestUtils.waitForState(cluster, "failed to create " + collectionName, collectionName,
+ CloudTestUtils.clusterShape(2, 2, false, true));
+
+ //long waitForSeconds = 3 + random().nextInt(5);
+ long waitForSeconds = 1;
+ String setTriggerCommand = "{" +
+ "'set-trigger' : {" +
+ "'name' : 'scaleUpTrigger'," +
+ "'event' : 'indexSize'," +
+ "'waitFor' : '" + waitForSeconds + "s'," +
+ "'aboveDocs' : " + ABOVE_SIZE + "," +
+ "'enabled' : true," +
+ "'actions' : [{'name' : 'compute_plan', 'class' : 'solr.ComputePlanAction'}," +
+ "{'name' : 'execute_plan', 'class' : '" + ExecutePlanAction.class.getName() + "'}]" +
+ "}}";
+ SolrRequest req = createAutoScalingRequest(SolrRequest.METHOD.POST, setTriggerCommand);
+ NamedList<Object> response = solrClient.request(req);
+ assertEquals(response.get("result").toString(), "success");
+
+ long batchSize = BATCH_SIZE;
+ for (long i = 0; i < NUM_BATCHES; i++) {
+ addDocs(collectionName, i * batchSize, batchSize);
+ log.info(String.format(Locale.ROOT, "#### Total docs so far: %,d", ((i + 1) * batchSize)));
+ timeSource.sleep(waitForSeconds);
+ }
+ timeSource.sleep(60000);
+ }
+
+ private void addDocs(String collection, long start, long count) throws Exception {
+ UpdateRequest ureq = new UpdateRequest();
+ ureq.setParam("collection", collection);
+ ureq.setDocIterator(new FakeDocIterator(start, count));
+ solrClient.request(ureq);
+ }
+
+ // lightweight generator of fake documents
+ // NOTE: this iterator only ever returns the same document, which works ok
+ // for our "index update" simulation. Obviously don't use this for real indexing.
+ private static class FakeDocIterator implements Iterator<SolrInputDocument> {
+ final SolrInputDocument doc = new SolrInputDocument();
+ final SolrInputField idField = new SolrInputField("id");
+
+ final long start, count;
+
+ long current, max;
+
+ FakeDocIterator(long start, long count) {
+ this.start = start;
+ this.count = count;
+ current = start;
+ max = start + count;
+ doc.put("id", idField);
+ idField.setValue("foo");
+ }
+
+ @Override
+ public boolean hasNext() {
+ return current < max;
+ }
+
+ @Override
+ public SolrInputDocument next() {
+ current++;
+ return doc;
+ }
+ }
+
+}