Posted to commits@lucene.apache.org by ma...@apache.org on 2020/08/12 16:06:49 UTC

[lucene-solr] branch reference_impl_dev updated (a7bbb0f -> c81c2fd)

This is an automated email from the ASF dual-hosted git repository.

markrmiller pushed a change to branch reference_impl_dev
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git.


 discard a7bbb0f  @534 Little IndexSchema opti.
 discard 812cbad  @533 Always stop an existing background replication if exists when starting a new one.
     new c81c2fd  @533 A thread safe schema is good for everyone!

This update added new revisions after undoing existing revisions.
That is to say, some revisions that were in the old version of the
branch are not in the new version.  This situation occurs
when a user --force pushes a change and generates a repository
containing something like this:

 * -- * -- B -- O -- O -- O   (a7bbb0f)
            \
             N -- N -- N   refs/heads/reference_impl_dev (c81c2fd)

You should already have received notification emails for all of the O
revisions, and so the following emails describe only the N revisions
from the common base, B.

Any revisions marked "omit" are not gone; other references still
refer to them.  Any revisions marked "discard" are gone forever.

The 1 revision listed above as "new" is entirely new to this
repository and will be described in a separate email.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../java/org/apache/solr/schema/IndexSchema.java   | 33 ++++++++++++----------
 .../org/apache/solr/schema/ManagedIndexSchema.java | 28 ++++++++++--------
 2 files changed, 34 insertions(+), 27 deletions(-)


[lucene-solr] 01/01: @533 A thread safe schema is good for everyone!

Posted by ma...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

markrmiller pushed a commit to branch reference_impl_dev
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit c81c2fd625957f4715b427c888fd21ce13a46418
Author: markrmiller@gmail.com <ma...@gmail.com>
AuthorDate: Wed Aug 12 11:06:31 2020 -0500

    @533 A thread safe schema is good for everyone!
---
 .../java/org/apache/solr/cloud/ZkController.java   |  3 ++
 .../java/org/apache/solr/core/CoreContainer.java   |  1 -
 .../java/org/apache/solr/schema/IndexSchema.java   | 61 +++++++++++-----------
 .../org/apache/solr/schema/ManagedIndexSchema.java | 28 +++++-----
 4 files changed, 50 insertions(+), 43 deletions(-)
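
Before the diff itself, a quick orientation for readers skimming the hunks:
the commit swaps the schema's plain HashMap/ArrayList collections for
concurrent ones and serializes the register* mutators. The following is a
minimal, hypothetical sketch of that pattern in standalone Java; it is not
the Solr classes themselves, and the field types are simplified.

    import java.util.Collections;
    import java.util.Map;
    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    // Hypothetical sketch, not Solr code: illustrates the concurrency pattern only.
    class ThreadSafeSchemaSketch {

      // Some code registers fields after construction, so use a ConcurrentHashMap
      // instead of a plain HashMap.
      private final Map<String, String> fields = new ConcurrentHashMap<>(128);

      // Copy-field destinations are kept per source in a concurrent Set rather
      // than an ArrayList, so concurrent readers never observe a list being resized.
      private final Map<String, Set<String>> copyFieldsMap = new ConcurrentHashMap<>(64);

      // Mutation is serialized with synchronized; reads stay lock-free.
      synchronized void registerField(String name, String type) {
        fields.put(name, type);
      }

      synchronized void registerCopyField(String source, String dest) {
        Set<String> dests = copyFieldsMap.get(source);
        if (dests == null) {
          dests = ConcurrentHashMap.newKeySet();
          copyFieldsMap.put(source, dests);
        }
        dests.add(dest);
      }

      Map<String, Set<String>> getCopyFieldsMap() {
        return Collections.unmodifiableMap(copyFieldsMap);
      }
    }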

diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index e41d1e0..fe10880 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -1631,6 +1631,9 @@ public class ZkController implements Closeable {
   public void startReplicationFromLeader(String coreName, boolean switchTransactionLog) throws InterruptedException {
     if (isClosed()) throw new AlreadyClosedException();
     log.info("{} starting background replication from leader", coreName);
+
+    stopReplicationFromLeader(coreName);
+
     ReplicateFromLeader replicateFromLeader = new ReplicateFromLeader(cc, coreName);
 
     ReplicateFromLeader prev = replicateFromLeaders.putIfAbsent(coreName, replicateFromLeader);
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 282ac68..cc79160 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -1688,7 +1688,6 @@ public class CoreContainer implements Closeable {
             }
 
           } else if (replica.getType() == Replica.Type.PULL) {
-            getZkController().stopReplicationFromLeader(core.getName());
             getZkController().startReplicationFromLeader(newCore.getName(), false);
           }
         }
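
The two hunks above move the "stop any previous replication" step into
startReplicationFromLeader itself, so CoreContainer no longer has to pair an
explicit stop with each start. A minimal, hypothetical sketch of that
stop-before-start pattern (standalone Java; Replicator stands in for
ReplicateFromLeader):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // Hypothetical sketch, not Solr code: start is made idempotent by tearing
    // down any replicator already registered for the core before installing a
    // new one.
    class ReplicationManagerSketch {

      interface Replicator {
        void start();
        void stop();
      }

      private final Map<String, Replicator> replicateFromLeaders = new ConcurrentHashMap<>();

      void startReplication(String coreName, Replicator fresh) {
        stopReplication(coreName);                      // stop an existing run, if any
        Replicator prev = replicateFromLeaders.putIfAbsent(coreName, fresh);
        if (prev == null) {
          fresh.start();                                // only the winner of the race starts
        }
      }

      void stopReplication(String coreName) {
        Replicator existing = replicateFromLeaders.remove(coreName);
        if (existing != null) {
          existing.stop();
        }
      }
    }
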
diff --git a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
index 74aeac1..220d514 100644
--- a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
+++ b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
@@ -23,6 +23,7 @@ import org.apache.lucene.queries.payloads.PayloadDecoder;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.util.Version;
 import org.apache.solr.common.MapSerializable;
+import org.apache.solr.common.ParWork;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
@@ -81,6 +82,7 @@ import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeMap;
 import java.util.TreeSet;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.Function;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
@@ -136,7 +138,9 @@ public class IndexSchema {
   protected final SolrResourceLoader loader;
   protected final Properties substitutableProperties;
 
-  protected Map<String,SchemaField> fields = new HashMap<>(128);
+  // some code will add fields after construction, needs to be thread safe
+  protected final Map<String,SchemaField> fields = new ConcurrentHashMap<>(128);
+
   protected Map<String,FieldType> fieldTypes = new HashMap<>(64);
 
   protected List<SchemaField> fieldsWithDefaultValue = new ArrayList<>(64);
@@ -145,20 +149,20 @@ public class IndexSchema {
 
   public DynamicField[] getDynamicFields() { return dynamicFields; }
 
-  protected Cache<String, SchemaField> dynamicFieldCache = new ConcurrentLRUCache(10000, 8000, 9000,100, false,false, null);
+  protected final Cache<String, SchemaField> dynamicFieldCache = new ConcurrentLRUCache(10000, 8000, 9000,100, false,false, null);
 
   private Analyzer indexAnalyzer;
   private Analyzer queryAnalyzer;
 
   protected List<SchemaAware> schemaAware = new ArrayList<>(64);
 
-  protected Map<String, List<CopyField>> copyFieldsMap = new HashMap<>(64);
-  public Map<String,List<CopyField>> getCopyFieldsMap() { return Collections.unmodifiableMap(copyFieldsMap); }
+  protected Map<String,Set<CopyField>> copyFieldsMap = new ConcurrentHashMap<>(64);
+  public Map<String,Set<CopyField>> getCopyFieldsMap() { return Collections.unmodifiableMap(copyFieldsMap); }
 
   protected DynamicCopy[] dynamicCopyFields = new DynamicCopy[] {};
   public DynamicCopy[] getDynamicCopyFields() { return dynamicCopyFields; }
 
-  private Map<FieldType, PayloadDecoder> decoders = new HashMap<>();  // cache to avoid scanning token filters repeatedly, unnecessarily
+  private Map<FieldType, PayloadDecoder> decoders = new ConcurrentHashMap<>();  // cache to avoid scanning token filters repeatedly, unnecessarily
 
   /**
    * keys are all fields copied to, count is num of copyField
@@ -181,7 +185,6 @@ public class IndexSchema {
     this.resourceName = Objects.requireNonNull(name);
     try {
       readSchema(is);
-      // nocommit
       loader.inform(loader);
     } catch (IOException e) {
       throw new RuntimeException(e);
@@ -634,8 +637,13 @@ public class IndexSchema {
 
   protected void postReadInform() {
     //Run the callbacks on SchemaAware now that everything else is done
-    for (SchemaAware aware : schemaAware) {
-      aware.inform(this);
+    try (ParWork work = new ParWork(this)) {
+      for (SchemaAware aware : schemaAware) {
+        work.collect(() -> {
+          aware.inform(this);
+        });
+      }
+      work.addCollect("postReadInform");
     }
   }
 
@@ -826,10 +834,10 @@ public class IndexSchema {
    * Register one or more new Dynamic Fields with the Schema.
    * @param fields The sequence of {@link org.apache.solr.schema.SchemaField}
    */
-  public void registerDynamicFields(SchemaField... fields) {
-    List<DynamicField> dynFields = Collections.synchronizedList(new ArrayList<>(asList(dynamicFields)));
+  public synchronized void registerDynamicFields(SchemaField... fields) {
+    List<DynamicField> dynFields = new ArrayList<>(asList(dynamicFields));
     for (SchemaField field : fields) {
-      if (isDuplicateDynField(dynFields, field)) { Collections.synchronizedList(new ArrayList<>(asList(dynamicFields)));
+      if (isDuplicateDynField(dynFields, field)) { new ArrayList<>(asList(dynamicFields));
         if (log.isDebugEnabled()) {
           log.debug("dynamic field already exists: dynamic field: [{}]", field.getName());
         }
@@ -859,16 +867,7 @@ public class IndexSchema {
     registerCopyField(source, dest, CopyField.UNLIMITED);
   }
 
-  /**
-   * <p>
-   * NOTE: this function is not thread safe.  However, it is safe to use within the standard
-   * <code>inform( SolrCore core )</code> function for <code>SolrCoreAware</code> classes.
-   * Outside <code>inform</code>, this could potentially throw a ConcurrentModificationException
-   * </p>
-   * 
-   * @see SolrCoreAware
-   */
-  public void registerCopyField(String source, String dest, int maxChars) {
+  public synchronized void registerCopyField(String source, String dest, int maxChars) {
     log.debug("{} {}='{}' {}='{}' {}='{}'", COPY_FIELD, SOURCE, source, DESTINATION, dest
               ,MAX_CHARS, maxChars);
 
@@ -975,9 +974,10 @@ public class IndexSchema {
   }
 
   protected void registerExplicitSrcAndDestFields(String source, int maxChars, SchemaField destSchemaField, SchemaField sourceSchemaField) {
-    List<CopyField> copyFieldList = copyFieldsMap.get(source);
+    Set<CopyField> copyFieldList = copyFieldsMap.get(source);
     if (copyFieldList == null) {
-      copyFieldList = new ArrayList<>();
+      copyFieldList = ConcurrentHashMap
+          .newKeySet();
       copyFieldsMap.put(source, copyFieldList);
     }
     copyFieldList.add(new CopyField(sourceSchemaField, destSchemaField, maxChars));
@@ -1349,7 +1349,7 @@ public class IndexSchema {
       return Collections.emptyList();
     }
     List<String> fieldNames = new ArrayList<>();
-    for (Map.Entry<String, List<CopyField>> cfs : copyFieldsMap.entrySet()) {
+    for (Map.Entry<String,Set<CopyField>> cfs : copyFieldsMap.entrySet()) {
       for (CopyField copyField : cfs.getValue()) {
         if (copyField.getDestination().getName().equals(destField)) {
           fieldNames.add(copyField.getSource().getName());
@@ -1378,7 +1378,7 @@ public class IndexSchema {
         result.add(new CopyField(getField(sourceField), dynamicCopy.getTargetField(sourceField), dynamicCopy.maxChars));
       }
     }
-    List<CopyField> fixedCopyFields = copyFieldsMap.get(sourceField);
+    Set<CopyField> fixedCopyFields = copyFieldsMap.get(sourceField);
     if (null != fixedCopyFields) {
       result.addAll(fixedCopyFields);
     }
@@ -1549,14 +1549,15 @@ public class IndexSchema {
   public List<SimpleOrderedMap<Object>> getCopyFieldProperties
       (boolean showDetails, Set<String> requestedSourceFields, Set<String> requestedDestinationFields) {
     List<SimpleOrderedMap<Object>> copyFieldProperties = new ArrayList<>();
-    SortedMap<String,List<CopyField>> sortedCopyFields = new TreeMap<>(copyFieldsMap);
-    for (List<CopyField> copyFields : sortedCopyFields.values()) {
-      copyFields = new ArrayList<>(copyFields);
-      Collections.sort(copyFields, (cf1, cf2) -> {
+    SortedMap<String,Set<CopyField>> sortedCopyFields = new TreeMap<>(copyFieldsMap);
+    for (Set<CopyField> copyFields : sortedCopyFields.values()) {
+      List<CopyField> cFields = new ArrayList<>(copyFields.size());
+      cFields.addAll(copyFields);
+      Collections.sort(cFields, (cf1, cf2) -> {
         // sources are all the same, just sorting by destination here
         return cf1.getDestination().getName().compareTo(cf2.getDestination().getName());
       });
-      for (CopyField copyField : copyFields) {
+      for (CopyField copyField : cFields) {
         final String source = copyField.getSource().getName();
         final String destination = copyField.getDestination().getName();
         if (   (null == requestedSourceFields      || requestedSourceFields.contains(source))
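
One aside on the registerExplicitSrcAndDestFields hunk above: the per-source
set is created with a get/null-check/put sequence, which is safe while
mutation is funneled through the synchronized registerCopyField. With a
ConcurrentHashMap the same step can also be written atomically; a
hypothetical sketch, not the committed code:

    import java.util.Map;
    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    // Hypothetical sketch: computeIfAbsent creates the per-source destination
    // set on demand in a single atomic step.
    class CopyFieldRegistrationSketch {

      private final Map<String, Set<String>> copyFieldsMap = new ConcurrentHashMap<>(64);

      void register(String source, String dest) {
        copyFieldsMap.computeIfAbsent(source, k -> ConcurrentHashMap.newKeySet()).add(dest);
      }
    }
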
diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
index 8898dbe..61ea8a3 100644
--- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
+++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
@@ -34,6 +34,7 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
@@ -527,8 +528,8 @@ public final class ManagedIndexSchema extends IndexSchema {
       newSchema.copyFieldTargetCounts.remove(oldField); // zero out target count for this field
 
       // Remove copy fields where the target is this field; remember them to rebuild
-      for (Map.Entry<String,List<CopyField>> entry : newSchema.copyFieldsMap.entrySet()) {
-        List<CopyField> perSourceCopyFields = entry.getValue();
+      for (Map.Entry<String,Set<CopyField>> entry : newSchema.copyFieldsMap.entrySet()) {
+        Set<CopyField> perSourceCopyFields = entry.getValue();
         Iterator<CopyField> checkDestCopyFieldsIter = perSourceCopyFields.iterator();
         while (checkDestCopyFieldsIter.hasNext()) {
           CopyField checkDestCopyField = checkDestCopyFieldsIter.next();
@@ -878,7 +879,7 @@ public final class ManagedIndexSchema extends IndexSchema {
     if (!found) {
       // non-dynamic copy field directive.
       // Here, source field could either exists in schema or match a dynamic rule
-      List<CopyField> copyFieldList = copyFieldsMap.get(source);
+      Set<CopyField> copyFieldList = copyFieldsMap.get(source);
       if (copyFieldList != null) {
         for (Iterator<CopyField> iter = copyFieldList.iterator() ; iter.hasNext() ; ) {
           CopyField copyField = iter.next();
@@ -905,7 +906,7 @@ public final class ManagedIndexSchema extends IndexSchema {
    * and adds the removed copy fields to removedCopyFields.
    */
   private void removeCopyFieldSource(String sourceFieldName, List<CopyField> removedCopyFields) {
-    List<CopyField> sourceCopyFields = copyFieldsMap.remove(sourceFieldName);
+    Set<CopyField> sourceCopyFields = copyFieldsMap.remove(sourceFieldName);
     if (null != sourceCopyFields) {
       for (CopyField sourceCopyField : sourceCopyFields) {
         decrementCopyFieldTargetCount(sourceCopyField.getDestination());
@@ -1031,12 +1032,15 @@ public final class ManagedIndexSchema extends IndexSchema {
     return newSchema;
   }
   
-  private Map<String,List<CopyField>> cloneCopyFieldsMap(Map<String,List<CopyField>> original) {
-    Map<String,List<CopyField>> clone = new HashMap<>(original.size());
-    Iterator<Map.Entry<String,List<CopyField>>> iterator = original.entrySet().iterator();
+  private Map<String,Set<CopyField>> cloneCopyFieldsMap(Map<String,Set<CopyField>> original) {
+    Map<String,Set<CopyField>> clone = new ConcurrentHashMap<>(original.size());
+    Iterator<Map.Entry<String,Set<CopyField>>> iterator = original.entrySet().iterator();
     while (iterator.hasNext()) {
-      Map.Entry<String,List<CopyField>> entry = iterator.next();
-      clone.put(entry.getKey(), new ArrayList<>(entry.getValue()));
+      Map.Entry<String,Set<CopyField>> entry = iterator.next();
+      Set<CopyField> copyFields = ConcurrentHashMap
+          .newKeySet(entry.getValue().size());
+      copyFields.addAll(entry.getValue());
+      clone.put(entry.getKey(), copyFields);
     }
     return clone;
   }
@@ -1101,10 +1105,10 @@ public final class ManagedIndexSchema extends IndexSchema {
         newSchema.fields.put(replacementField.getName(), replacementField);
       }
       // Remove copy fields where the target is of the type being replaced; remember them to rebuild
-      Iterator<Map.Entry<String,List<CopyField>>> copyFieldsMapIter = newSchema.copyFieldsMap.entrySet().iterator();
+      Iterator<Map.Entry<String,Set<CopyField>>> copyFieldsMapIter = newSchema.copyFieldsMap.entrySet().iterator();
       while (copyFieldsMapIter.hasNext()) {
-        Map.Entry<String,List<CopyField>> entry = copyFieldsMapIter.next();
-        List<CopyField> perSourceCopyFields = entry.getValue();
+        Map.Entry<String,Set<CopyField>> entry = copyFieldsMapIter.next();
+        Set<CopyField> perSourceCopyFields = entry.getValue();
         Iterator<CopyField> checkDestCopyFieldsIter = perSourceCopyFields.iterator();
         while (checkDestCopyFieldsIter.hasNext()) {
           CopyField checkDestCopyField = checkDestCopyFieldsIter.next();