Posted to commits@lucene.apache.org by ab...@apache.org on 2018/12/17 23:28:28 UTC

[1/4] lucene-solr:jira/solr-12259: Initial port, still broken.

Repository: lucene-solr
Updated Branches:
  refs/heads/jira/solr-12259 [created] 010ca9e54


Initial port, still broken.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/37bbad35
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/37bbad35
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/37bbad35

Branch: refs/heads/jira/solr-12259
Commit: 37bbad35ef132954ad8204615925d7c249b72aa7
Parents: 17fca05
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Mon Dec 10 11:50:13 2018 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Mon Dec 10 11:50:13 2018 +0100

----------------------------------------------------------------------
 .../index/AddDocValuesMergePolicyFactory.java   | 426 +++++++++++++++++++
 .../solr/index/PluggableMergePolicyFactory.java | 119 ++++++
 .../UninvertDocValuesMergePolicyFactory.java    | 100 +----
 .../index/UninvertingFilterCodecReader.java     | 107 +++++
 .../conf/schema-docValuesUpgrade.xml            |  76 ++++
 .../solrconfig-pluggablemergepolicyfactory.xml  |  39 ++
 .../solr/index/ConcurrentIndexUpgradeTest.java  | 281 ++++++++++++
 .../solr/index/IndexUpgradeIntegrationTest.java | 151 +++++++
 .../solr/index/PluggableMergePolicyTest.java    | 207 +++++++++
 .../cloud/AbstractFullDistribZkTestBase.java    |  54 +++
 10 files changed, 1461 insertions(+), 99 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37bbad35/solr/core/src/java/org/apache/solr/index/AddDocValuesMergePolicyFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/index/AddDocValuesMergePolicyFactory.java b/solr/core/src/java/org/apache/solr/index/AddDocValuesMergePolicyFactory.java
new file mode 100644
index 0000000..9cfee8c
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/index/AddDocValuesMergePolicyFactory.java
@@ -0,0 +1,426 @@
+package org.apache.solr.index;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Function;
+
+import org.apache.lucene.index.CodecReader;
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FilterMergePolicy;
+import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.MergeTrigger;
+import org.apache.lucene.index.SegmentCommitInfo;
+import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.index.SegmentReader;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.util.Version;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.schema.SchemaField;
+import org.apache.solr.uninverting.UninvertingReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A merge policy that detects schema changes and writes docValues into merging segments
+ * when a field has docValues enabled, using UninvertingReader.
+ *
+ * This merge policy uses the wrapped MergePolicy (default: TieredMergePolicy) to select
+ * the regular merge segments.
+ */
+public class AddDocValuesMergePolicyFactory extends WrapperMergePolicyFactory {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private final boolean skipIntegrityCheck;
+  private final boolean noMerge;
+  private final String marker;
+  private final Function<String, UninvertingReader.Type> schemaUninversionMapper;
+
+  public static final String SKIP_INTEGRITY_CHECK_PROP = "skipIntegrityCheck";
+  public static final String MARKER_PROP = "marker";
+  public static final String NO_MERGE_PROP = "noMerge";
+
+  public static final String DIAGNOSTICS_MARKER_PROP = "__addDVMarker__";
+  public static final String DEFAULT_MARKER = AddDVOneMerge.class.getSimpleName();
+
+
+  public AddDocValuesMergePolicyFactory(SolrResourceLoader resourceLoader, MergePolicyFactoryArgs args, IndexSchema schema) {
+    super(resourceLoader, args, schema);
+    schemaUninversionMapper = name -> {
+      SchemaField sf = schema.getFieldOrNull(name);
+      if (sf == null) {
+        return null;
+      }
+      if (sf.hasDocValues()) {
+        return sf.getType().getUninversionType(sf);
+      } else {
+        return null;
+      }
+    };
+    final Boolean sic = (Boolean)args.remove(SKIP_INTEGRITY_CHECK_PROP);
+    if (sic != null) {
+      this.skipIntegrityCheck = sic.booleanValue();
+    } else {
+      this.skipIntegrityCheck = false;
+    }
+    Object m = args.remove(MARKER_PROP);
+    if (m != null) {
+      this.marker = String.valueOf(m);
+    } else {
+      this.marker = DEFAULT_MARKER;
+    }
+    final Boolean nm = (Boolean)args.remove(NO_MERGE_PROP);
+    if (nm != null) {
+      this.noMerge = nm.booleanValue();
+    } else {
+      this.noMerge = false;
+    }
+    if (!args.keys().isEmpty()) {
+      throw new IllegalArgumentException("Arguments were "+args+" but "+getClass().getSimpleName()+" takes no such arguments.");
+    }
+
+    log.info("Using args: marker={}, noMerge={}, skipIntegrityCheck={}", marker, noMerge, skipIntegrityCheck);
+  }
+
+  /**
+   * Whether the wrapped docValues producer should check index consistency.
+   */
+  public boolean getSkipIntegrityCheck() {
+    return skipIntegrityCheck;
+  }
+
+  /**
+   * Marker value used to tag already converted segments.
+   * If not null, only segments that don't contain this marker value will be rewritten;
+   * if null, only segments without any marker value will be rewritten.
+   */
+  public String getMarker() {
+    return marker;
+  }
+
+  @Override
+  public MergePolicy getMergePolicyInstance(MergePolicy wrappedMP) {
+    return new AddDVMergePolicy(wrappedMP, getUninversionMapper(), marker, noMerge, skipIntegrityCheck);
+  }
+
+  private Function<FieldInfo, UninvertingReader.Type> getUninversionMapper() {
+    return fi -> {
+      if (UninvertingReader.shouldWrap(fi, schemaUninversionMapper) != null) {
+        return schemaUninversionMapper.apply(fi.name);
+      } else {
+        return null;
+      }
+    };
+  }
+
+
+  public static class AddDVMergePolicy extends FilterMergePolicy {
+    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+    private final Function<FieldInfo, UninvertingReader.Type> mapping;
+    private final String marker;
+    private final boolean skipIntegrityCheck;
+    private final boolean noMerge;
+    private final Map<String, Object> stats = new ConcurrentHashMap<>();
+
+    public AddDVMergePolicy(MergePolicy in, Function<FieldInfo, UninvertingReader.Type> mapping, String marker, boolean noMerge, boolean skipIntegrityCheck) {
+      super(in);
+      this.mapping = mapping;
+      this.marker = marker;
+      this.noMerge = noMerge;
+      this.skipIntegrityCheck = skipIntegrityCheck;
+    }
+
+    public Map<String, Object> getStatistics() {
+      return stats;
+    }
+
+    private void count(String name) {
+      count(name, 1);
+    }
+
+    private void count(String name, int delta) {
+      AtomicInteger counter = (AtomicInteger)stats.computeIfAbsent(name, n -> new AtomicInteger());
+      counter.addAndGet(delta);
+    }
+
+    private MergeSpecification wrapMergeSpecification(String mergeType, MergeSpecification spec) throws IOException {
+      if (spec == null || spec.merges.isEmpty()) {
+        count("emptyMerge");
+        return spec;
+      }
+      StringBuilder sb = new StringBuilder();
+      count("mergesTotal", spec.merges.size());
+      count(mergeType);
+      for (int i = 0; i < spec.merges.size(); i++) {
+        OneMerge oneMerge = spec.merges.get(i);
+        if (oneMerge instanceof AddDVOneMerge) { // already wrapping
+          continue;
+        }
+        int needWrapping = 0;
+        sb.setLength(0);
+        for (SegmentCommitInfo info : oneMerge.segments) {
+          String source = info.info.getDiagnostics().get("source");
+          String clazz = info.info.getDiagnostics().get("class");
+          if (clazz == null) {
+            clazz = "?";
+          }
+          String shouldRewrite = shouldRewrite(info);
+          if (shouldRewrite != null) {
+            needWrapping++;
+            if (sb.length() > 0) {
+              sb.append(' ');
+            }
+            sb.append(info.toString() + "(" + source + "," + shouldRewrite + "," + clazz + ")");
+          }
+        }
+        if (needWrapping > 0) {
+          log.info("-- OneMerge needs wrapping ({}/{}): {}", needWrapping, oneMerge.segments.size(), sb.toString());
+          OneMerge wrappedOneMerge = new AddDVOneMerge(oneMerge.segments, mapping, marker, skipIntegrityCheck,
+              "mergeType", mergeType, "needWrapping", String.valueOf(needWrapping), "wrapping", sb.toString());
+          spec.merges.set(i, wrappedOneMerge);
+          count("segmentsWrapped", needWrapping);
+          count("mergesWrapped");
+        } else {
+          log.info("-- OneMerge doesn't need wrapping {}", oneMerge.segments);
+          OneMerge nonWrappedOneMerge = new OneMerge(oneMerge.segments) {
+            @Override
+            public void setMergeInfo(SegmentCommitInfo info) {
+              super.setMergeInfo(info);
+              if (marker != null) {
+                info.info.getDiagnostics().put(DIAGNOSTICS_MARKER_PROP, marker);
+              }
+              info.info.getDiagnostics().put("class", oneMerge.getClass().getSimpleName() + "-nonWrapped");
+              info.info.getDiagnostics().put("mergeType", mergeType);
+              info.info.getDiagnostics().put("segString", AddDVMergePolicy.segString(oneMerge));
+            }
+          };
+          spec.merges.set(i, nonWrappedOneMerge);
+          count("mergesUnwrapped");
+        }
+      }
+      return spec;
+    }
+
+    private static String segString(OneMerge oneMerge) {
+      StringBuilder b = new StringBuilder();
+      final int numSegments = oneMerge.segments.size();
+      for(int i=0;i<numSegments;i++) {
+        if (i > 0) {
+          b.append('\n');
+        }
+        b.append(oneMerge.segments.get(i).toString());
+        b.append('#');
+        Map<String, String> diag = oneMerge.segments.get(i).info.getDiagnostics();
+        b.append(diag.get("source"));
+        if (diag.get("class") != null) {
+          b.append('#');
+          b.append(diag.get("class"));
+          if (diag.get("segString") != null) {
+            b.append("#ss=");
+            b.append(diag.get("segString"));
+          }
+        }
+      }
+      return b.toString();
+
+    }
+
+    @Override
+    public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext ctx) throws IOException {
+      if (noMerge) {
+        count("noMerge");
+        log.debug("findMerges: skipping, noMerge set");
+        return null;
+      }
+      MergeSpecification spec = super.findMerges(mergeTrigger, segmentInfos, ctx);
+      return wrapMergeSpecification("findMerges", spec);
+    }
+
+    /**
+     * @param info the segment to be examined
+     * @return a non-null string indicating the reason for rewriting, e.g. when the schema has
+     *        docValues=true for at least one field and the segment does _not_ have docValues
+     *        information for that field, so it should be rewritten. This may also return
+     *        non-null on a marker mismatch (the marker check below is currently commented out).
+     */
+    private String shouldRewrite(SegmentCommitInfo info) {
+      // Need to get a reader for this segment
+      try (SegmentReader reader = new SegmentReader(info, Version.LUCENE_8_0_0.major, IOContext.DEFAULT)) {
+        // check the marker, if defined
+        String existingMarker = info.info.getDiagnostics().get(DIAGNOSTICS_MARKER_PROP);
+        String source = info.info.getDiagnostics().get("source");
+        // always rewrite if markers don't match?
+//        if (!"flush".equals(source) && marker != null && !marker.equals(existingMarker)) {
+//          return "marker";
+//        }
+        StringBuilder sb = new StringBuilder();
+        for (FieldInfo fi : reader.getFieldInfos()) {
+          if (fi.getDocValuesType() != DocValuesType.NONE) {
+            Map<String, Object> dvStats = UninvertingReader.getDVStats(reader, fi);
+            if (!((Integer)dvStats.get("numDocs")).equals((Integer)dvStats.get("present"))) {
+              throw new RuntimeException("segment: " + info.toString() + " " + fi.name + ", dvStats: " + dvStats + " diag: " + info.info.getDiagnostics());
+            }
+          }
+          if (mapping.apply(fi) != null) {
+            if (sb.length() > 0) {
+              sb.append(',');
+            }
+            sb.append(fi.name);
+          }
+        }
+//        return sb.toString();
+        return sb.length() > 0 ? sb.toString() : null;
+      } catch (IOException e) {
+        // It's safer to rewrite the segment if there's an error, although it may lead to a lot of work.
+        log.warn("Error opening a reader for segment {}, will rewrite segment", info.toString());
+        count("shouldRewriteError");
+        return "error " + e.getMessage();
+      }
+    }
+
+    @Override
+    public MergeSpecification findForcedMerges(SegmentInfos infos, int maxSegmentCount, Map<SegmentCommitInfo, Boolean> segmentsToMerge, MergeContext ctx) throws IOException {
+      if (noMerge) {
+        log.debug("findForcedMerges: skipping, noMerge set");
+        return null;
+      }
+      MergeSpecification spec = super.findForcedMerges(infos, maxSegmentCount, segmentsToMerge, ctx);
+      if (spec == null) {
+        spec = new MergeSpecification();
+      }
+
+      // now find the stragglers and add them individually to the merge
+
+      // don't take into account segments that are already being merged
+      final Set<SegmentCommitInfo> merging = new HashSet<>(ctx.getMergingSegments());
+      // nor the ones already to be wrapped
+      for (OneMerge om : spec.merges) {
+        merging.addAll(om.segments);
+      }
+
+      Iterator<SegmentCommitInfo> iter = infos.iterator();
+      while (iter.hasNext()) {
+        SegmentCommitInfo info = iter.next();
+        final Boolean isOriginal = segmentsToMerge.get(info);
+        if (isOriginal == null || isOriginal == Boolean.FALSE || merging.contains(info)) {
+          continue;
+        } else {
+          String shouldRewrite = shouldRewrite(info);
+          if (shouldRewrite != null) {
+            count("forcedMergeWrapped");
+            log.info("--straggler {}", info.toString());
+            spec.add(new AddDVOneMerge(Collections.singletonList(info), mapping, marker, skipIntegrityCheck,
+                "mergeType", "straggler:" + shouldRewrite));
+          }
+        }
+      }
+      if (spec.merges.isEmpty()) {
+        spec = null;
+      }
+
+      return wrapMergeSpecification("findForcedMerges", spec);
+    }
+
+    @Override
+    public MergeSpecification findForcedDeletesMerges(SegmentInfos infos, MergeContext ctx) throws IOException {
+      if (noMerge) {
+        log.debug("findForcedDeletesMerges: skipping, noMerge set");
+        return null;
+      }
+      MergeSpecification spec = super.findForcedDeletesMerges(infos, ctx);
+      return wrapMergeSpecification("findForcedDeletesMerges", spec);
+    }
+  }
+
+  private static class AddDVOneMerge extends MergePolicy.OneMerge {
+
+    private final String marker;
+    private final Function<FieldInfo, UninvertingReader.Type> mapping;
+    private final boolean skipIntegrityCheck;
+    private final String[] metaPairs;
+
+    public AddDVOneMerge(List<SegmentCommitInfo> segments, Function<FieldInfo, UninvertingReader.Type> mapping, String marker,
+                         final boolean skipIntegrityCheck, String... metaPairs) {
+      super(segments);
+      this.mapping = mapping;
+      this.marker = marker;
+      this.skipIntegrityCheck = skipIntegrityCheck;
+      this.metaPairs = metaPairs;
+    }
+
+    @Override
+    public CodecReader wrapForMerge(CodecReader reader) throws IOException {
+      // Wrap the reader with an uninverting reader if the schema says
+      // the field should have docValues.
+      // NOTE: this also converts fields that already have docValues,
+      // updating their values to the current schema type.
+
+      Map<String, UninvertingReader.Type> uninversionMap = null;
+
+      for (FieldInfo fi : reader.getFieldInfos()) {
+        final UninvertingReader.Type type = mapping.apply(fi);
+        if (type != null) {
+          if (uninversionMap == null) {
+            uninversionMap = new HashMap<>();
+          }
+          uninversionMap.put(fi.name, type);
+        }
+      }
+
+      if (uninversionMap == null) {
+        log.info("-- reader unwrapped: " + reader);
+        return reader; // Default to normal reader if nothing to uninvert
+      } else {
+        log.info("-- reader wrapped " + reader);
+        return new UninvertingFilterCodecReader(reader, uninversionMap, skipIntegrityCheck);
+      }
+    }
+
+    @Override
+    public void setMergeInfo(SegmentCommitInfo info) {
+      super.setMergeInfo(info);
+      info.info.getDiagnostics().put(DIAGNOSTICS_MARKER_PROP, marker);
+      info.info.getDiagnostics().put("class", getClass().getSimpleName());
+      info.info.getDiagnostics().put("segString", AddDVMergePolicy.segString(this));
+      if (metaPairs != null && metaPairs.length > 1) {
+        int len = metaPairs.length;
+        if ((metaPairs.length % 2) != 0) {
+          len--;
+        }
+        for (int i = 0; i < len; i += 2) {
+          info.info.getDiagnostics().put(metaPairs[i], metaPairs[i + 1]);
+        }
+      }
+    }
+  }
+}
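
For context, a minimal sketch of how this factory could be wired up directly in
solrconfig.xml, assuming the standard <mergePolicyFactory> element and the
skipIntegrityCheck/marker/noMerge args parsed in the constructor above (all values
shown are illustrative, not taken from this commit):

  <indexConfig>
    <mergePolicyFactory class="org.apache.solr.index.AddDocValuesMergePolicyFactory">
      <bool name="skipIntegrityCheck">false</bool>
      <bool name="noMerge">false</bool>
      <str name="marker">AddDVOneMerge</str>
    </mergePolicyFactory>
  </indexConfig>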

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37bbad35/solr/core/src/java/org/apache/solr/index/PluggableMergePolicyFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/index/PluggableMergePolicyFactory.java b/solr/core/src/java/org/apache/solr/index/PluggableMergePolicyFactory.java
new file mode 100644
index 0000000..54708bc
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/index/PluggableMergePolicyFactory.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.index;
+
+import java.util.Map;
+
+import org.apache.lucene.index.MergePolicy;
+import org.apache.solr.cloud.CloudDescriptor;
+import org.apache.solr.cloud.ZkSolrResourceLoader;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.PluginInfo;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.schema.FieldType;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.util.plugin.SolrCoreAware;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * A merge policy factory whose concrete {@link MergePolicyFactory} implementation is
+ * selected at runtime via cluster properties, either per collection or cluster-wide.
+ */
+public class PluggableMergePolicyFactory extends SimpleMergePolicyFactory implements SolrCoreAware {
+  public static Logger log = LoggerFactory.getLogger(PluggableMergePolicyFactory.class);
+
+  public static final String MERGE_POLICY_PROP = "ext.mergePolicyFactory.collections.";
+  public static final String DEFAULT_POLICY_PROP = "ext.mergePolicyFactory.default";
+  private final MergePolicyFactory defaultMergePolicyFactory;
+  private PluginInfo pluginInfo;
+
+  public PluggableMergePolicyFactory(SolrResourceLoader resourceLoader, MergePolicyFactoryArgs args, IndexSchema schema) {
+    super(resourceLoader, args, schema);
+    defaultMergePolicyFactory = new TieredMergePolicyFactory(resourceLoader, args, schema);
+  }
+
+  @Override
+  public void inform(SolrCore core) {
+    CloudDescriptor cd = core.getCoreDescriptor().getCloudDescriptor();
+    if (cd == null) {
+      log.info("Solr not in Cloud mode - using default MergePolicy");
+      return;
+    }
+    // we can safely assume here that our loader is ZK enabled
+    ZkStateReader zkStateReader = ((ZkSolrResourceLoader)resourceLoader).getZkController().getZkStateReader();
+    Map<String, Object> clusterProps = zkStateReader.getClusterProperties();
+    log.debug("-- clusterprops: {}", clusterProps);
+    String propName = MERGE_POLICY_PROP + cd.getCollectionName();
+    Object o = clusterProps.get(propName);
+    if (o == null) {
+      // try getting the default one
+      o = clusterProps.get(DEFAULT_POLICY_PROP);
+      if (o != null) {
+        log.debug("Using default MergePolicy configured in cluster properties.");
+      }
+    } else {
+      log.debug("Using collection-specific MergePolicy configured in cluster properties.");
+    }
+    if (o == null) {
+      log.info("No configuration in cluster properties - using default MergePolicy.");
+      return;
+    }
+    Map<String, Object> props = null;
+    if (o instanceof String) {
+      // JSON string
+      props = (Map<String, Object>)Utils.fromJSONString(String.valueOf(o));
+    } else if (o instanceof Map) {
+      props = (Map)o;
+    }
+    if (props == null || !props.containsKey(FieldType.CLASS_NAME)) {
+      log.error("MergePolicy plugin info missing or lacking a class name, using default: " + o);
+      return;
+    }
+    log.info("Using pluggable MergePolicy: {}", props.get(FieldType.CLASS_NAME));
+    pluginInfo = new PluginInfo("mergePolicyFactory", props);
+  }
+
+  private static final String[] NO_SUB_PACKAGES = new String[0];
+
+  @Override
+  protected MergePolicy getMergePolicyInstance() {
+    if (pluginInfo != null) {
+      String mpfClassName = pluginInfo.className;
+      MergePolicyFactoryArgs mpfArgs = pluginInfo.initArgs != null ?
+          new MergePolicyFactoryArgs(pluginInfo.initArgs) : new MergePolicyFactoryArgs();
+      try {
+        MergePolicyFactory policyFactory = resourceLoader.newInstance(
+            mpfClassName,
+            MergePolicyFactory.class,
+            NO_SUB_PACKAGES,
+            new Class[] { SolrResourceLoader.class, MergePolicyFactoryArgs.class, IndexSchema.class },
+            new Object[] { resourceLoader, mpfArgs, schema });
+
+        return policyFactory.getMergePolicy();
+      } catch (Exception e) {
+        log.error("Error instantiating pluggable MergePolicy, using default instead", e);
+        return defaultMergePolicyFactory.getMergePolicy();
+      }
+    } else {
+      return defaultMergePolicyFactory.getMergePolicy();
+    }
+  }
+
+}
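
The tests in this commit drive this factory through cluster properties; a condensed
SolrJ sketch of selecting a per-collection merge policy at runtime, distilled from
the test code below ("myCollection" and cloudClient are placeholders):

  Map<String, Object> pluginProps = new HashMap<>();
  pluginProps.put(FieldType.CLASS_NAME, AddDocValuesMergePolicyFactory.class.getName());
  new CollectionAdminRequest.ClusterProp()
      .setPropertyName(PluggableMergePolicyFactory.MERGE_POLICY_PROP + "myCollection")
      .setPropertyValue(Utils.toJSONString(pluginProps))
      .process(cloudClient);
  // reload the collection so that inform(SolrCore) re-reads the cluster properties
  CollectionAdminRequest.reloadCollection("myCollection").process(cloudClient);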

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37bbad35/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java b/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java
index 46f03d3..ec479b8 100644
--- a/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java
+++ b/solr/core/src/java/org/apache/solr/index/UninvertDocValuesMergePolicyFactory.java
@@ -21,22 +21,13 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.lucene.codecs.DocValuesProducer;
-import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.CodecReader;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.FieldInfos;
-import org.apache.lucene.index.FilterCodecReader;
 import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.MergePolicy;
-import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.OneMergeWrappingMergePolicy;
 import org.apache.lucene.index.SegmentCommitInfo;
-import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.index.SortedNumericDocValues;
-import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
@@ -119,101 +110,12 @@ public class UninvertDocValuesMergePolicyFactory extends WrapperMergePolicyFacto
       if(uninversionMap == null) {
         return reader; // Default to normal reader if nothing to uninvert
       } else {
-        return new UninvertingFilterCodecReader(reader, uninversionMap);
+        return new UninvertingFilterCodecReader(reader, uninversionMap, skipIntegrityCheck);
       }
       
     }
     
   }
-  
-  
-  /**
-   * Delegates to an Uninverting for fields with docvalues
-   * 
-   * This is going to blow up FieldCache, look into an alternative implementation that uninverts without
-   * fieldcache
-   */
-  private class UninvertingFilterCodecReader extends FilterCodecReader {
-
-    private final LeafReader uninvertingReader;
-    private final DocValuesProducer docValuesProducer;
-
-    public UninvertingFilterCodecReader(CodecReader in, Map<String,UninvertingReader.Type> uninversionMap) {
-      super(in);
-
-      this.uninvertingReader = UninvertingReader.wrap(in, uninversionMap::get);
-      this.docValuesProducer = new DocValuesProducer() {
-
-        @Override
-        public NumericDocValues getNumeric(FieldInfo field) throws IOException {
-          return uninvertingReader.getNumericDocValues(field.name);
-        }
-
-        @Override
-        public BinaryDocValues getBinary(FieldInfo field) throws IOException {
-          return uninvertingReader.getBinaryDocValues(field.name);
-        }
 
-        @Override
-        public SortedDocValues getSorted(FieldInfo field) throws IOException {
-          return uninvertingReader.getSortedDocValues(field.name);
-        }
-
-        @Override
-        public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
-          return uninvertingReader.getSortedNumericDocValues(field.name);
-        }
-
-        @Override
-        public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
-          return uninvertingReader.getSortedSetDocValues(field.name);
-        }
-
-        @Override
-        public void checkIntegrity() throws IOException {
-          if (!skipIntegrityCheck) {
-            uninvertingReader.checkIntegrity();
-          }
-        }
-
-        @Override
-        public void close() throws IOException {
-        }
-
-        @Override
-        public long ramBytesUsed() {
-          return 0;
-        }
-      };
-    }
-    
-    @Override
-    protected void doClose() throws IOException {
-      docValuesProducer.close();
-      uninvertingReader.close();
-      super.doClose();
-    }
-
-    @Override
-    public DocValuesProducer getDocValuesReader() {
-      return docValuesProducer;
-    }
-    
-    @Override
-    public FieldInfos getFieldInfos() {
-      return uninvertingReader.getFieldInfos();
-    }
-
-    @Override
-    public CacheHelper getCoreCacheHelper() {
-      return in.getCoreCacheHelper();
-    }
-
-    @Override
-    public CacheHelper getReaderCacheHelper() {
-      return in.getReaderCacheHelper();
-    }
-    
-  }
 
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37bbad35/solr/core/src/java/org/apache/solr/index/UninvertingFilterCodecReader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/index/UninvertingFilterCodecReader.java b/solr/core/src/java/org/apache/solr/index/UninvertingFilterCodecReader.java
new file mode 100644
index 0000000..8109a88
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/index/UninvertingFilterCodecReader.java
@@ -0,0 +1,107 @@
+package org.apache.solr.index;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.lucene.codecs.DocValuesProducer;
+import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.CodecReader;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FieldInfos;
+import org.apache.lucene.index.FilterCodecReader;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.solr.uninverting.UninvertingReader;
+
+/**
+ * Delegates to an UninvertingReader for fields with docValues.
+ *
+ * NOTE: this is going to blow up the FieldCache; look into an alternative
+ * implementation that uninverts without the FieldCache.
+ */
+public class UninvertingFilterCodecReader extends FilterCodecReader {
+
+  private final LeafReader uninvertingReader;
+  private final DocValuesProducer docValuesProducer;
+
+  public UninvertingFilterCodecReader(CodecReader in, Map<String, UninvertingReader.Type> uninversionMap,
+                                      boolean skipIntegrityCheck) {
+    super(in);
+
+    this.uninvertingReader = UninvertingReader.wrap(in, uninversionMap::get);
+    this.docValuesProducer = new DocValuesProducer() {
+
+      @Override
+      public NumericDocValues getNumeric(FieldInfo field) throws IOException {
+        return uninvertingReader.getNumericDocValues(field.name);
+      }
+
+      @Override
+      public BinaryDocValues getBinary(FieldInfo field) throws IOException {
+        return uninvertingReader.getBinaryDocValues(field.name);
+      }
+
+      @Override
+      public SortedDocValues getSorted(FieldInfo field) throws IOException {
+        return uninvertingReader.getSortedDocValues(field.name);
+      }
+
+      @Override
+      public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException {
+        return uninvertingReader.getSortedNumericDocValues(field.name);
+      }
+
+      @Override
+      public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException {
+        return uninvertingReader.getSortedSetDocValues(field.name);
+      }
+
+      @Override
+      public void checkIntegrity() throws IOException {
+        if (!skipIntegrityCheck) {
+          uninvertingReader.checkIntegrity();
+        }
+      }
+
+      @Override
+      public void close() throws IOException {
+      }
+
+      @Override
+      public long ramBytesUsed() {
+        return 0;
+      }
+    };
+  }
+
+  @Override
+  protected void doClose() throws IOException {
+    docValuesProducer.close();
+    uninvertingReader.close();
+    super.doClose();
+  }
+
+  @Override
+  public DocValuesProducer getDocValuesReader() {
+    return docValuesProducer;
+  }
+
+  @Override
+  public FieldInfos getFieldInfos() {
+    return uninvertingReader.getFieldInfos();
+  }
+
+  @Override
+  public CacheHelper getCoreCacheHelper() {
+    return in.getCoreCacheHelper();
+  }
+
+  @Override
+  public CacheHelper getReaderCacheHelper() {
+    return in.getReaderCacheHelper();
+  }
+
+}
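
A sketch of the intended usage during a merge, condensed from the wrapForMerge
implementations above (reader, mapping and the skipIntegrityCheck flag stand in for
the surrounding merge-policy state):

  // build a field -> uninversion-type map from the schema-driven mapping,
  // then wrap the merging reader only if there is something to uninvert
  Map<String, UninvertingReader.Type> uninversionMap = new HashMap<>();
  for (FieldInfo fi : reader.getFieldInfos()) {
    UninvertingReader.Type type = mapping.apply(fi); // null when no uninversion is needed
    if (type != null) {
      uninversionMap.put(fi.name, type);
    }
  }
  CodecReader toMerge = uninversionMap.isEmpty()
      ? reader // nothing to uninvert, merge the segment as-is
      : new UninvertingFilterCodecReader(reader, uninversionMap, false);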

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37bbad35/solr/core/src/test-files/solr/collection1/conf/schema-docValuesUpgrade.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-docValuesUpgrade.xml b/solr/core/src/test-files/solr/collection1/conf/schema-docValuesUpgrade.xml
new file mode 100644
index 0000000..5f21368
--- /dev/null
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-docValuesUpgrade.xml
@@ -0,0 +1,76 @@
+<?xml version="1.0" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- The Solr schema file. This file should be named "schema.xml" and
+     should be located where the classloader for the Solr webapp can find it.
+
+     This schema is used for testing, and as such has everything and the
+     kitchen sink thrown in. See example/solr/conf/schema.xml for a
+     more concise example.
+
+  -->
+
+<schema name="schema-docValues" version="1.5">
+  <types>
+
+    <!-- field type definitions... note that the "name" attribute is
+         just a label to be used by field definitions.  The "class"
+         attribute and any other attributes determine the real type and
+         behavior of the fieldType.
+      -->
+
+    <!-- numeric field types that store and index the text
+         value verbatim (and hence don't sort correctly or support range queries.)
+         These are provided more for backward compatibility, allowing one
+         to create a schema that matches an existing lucene index.
+    -->
+    <fieldType name="int" class="solr.TrieIntField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+    <fieldType name="float" class="solr.TrieFloatField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+    <fieldType name="long" class="solr.TrieLongField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+    <fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+    <!-- format for date is 1995-12-31T23:59:59.999Z and only the fractional
+         seconds part (.999) is optional.
+      -->
+    <fieldType name="date" class="solr.TrieDateField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
+
+    <fieldType name="boolean" class="solr.BoolField" />
+    <fieldType name="string" class="solr.StrField" />
+
+    <fieldType name="uuid" class="solr.UUIDField" />
+
+  </types>
+
+
+  <fields>
+
+    <field name="id" type="string" required="true" />
+    <field name="_version_" type="long" indexed="false" stored="true" docValues="true"/>
+
+    <field name="floatdv" type="float" indexed="false" stored="false" docValues="true" default="1" />
+    <field name="intdv" type="int" indexed="false" stored="false" docValues="true" default="2" />
+    <field name="doubledv" type="double" indexed="false" stored="false" docValues="true" default="3" />
+    <field name="longdv" type="long" indexed="false" stored="false" docValues="true" default="4" />
+    <field name="datedv" type="date" indexed="false" stored="false" docValues="true" default="1995-12-31T23:59:59.999Z" />
+
+    <field name="stringdv" type="string" indexed="false" stored="false" docValues="true" default="solr" />
+    <field name="string_add_dv_later" type="string" indexed="true" stored="true" docValues="false"/>
+  </fields>
+
+  <uniqueKey>id</uniqueKey>
+
+</schema>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37bbad35/solr/core/src/test-files/solr/collection1/conf/solrconfig-pluggablemergepolicyfactory.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-pluggablemergepolicyfactory.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-pluggablemergepolicyfactory.xml
new file mode 100644
index 0000000..73eab3a
--- /dev/null
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-pluggablemergepolicyfactory.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" ?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<config>
+  <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
+  <directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.RAMDirectoryFactory}"/>
+  <!-- allow editable schema -->
+  <schemaFactory class="ManagedIndexSchemaFactory">
+    <bool name="mutable">true</bool>
+  </schemaFactory>
+
+  <indexConfig>
+    <useCompoundFile>${useCompoundFile:false}</useCompoundFile>
+    <!--<infoStream>true</infoStream>-->
+    <mergePolicyFactory class="org.apache.solr.index.PluggableMergePolicyFactory">
+    </mergePolicyFactory>
+     
+    <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
+  </indexConfig>
+
+  <requestHandler name="/select" class="solr.SearchHandler"></requestHandler>
+
+</config>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37bbad35/solr/core/src/test/org/apache/solr/index/ConcurrentIndexUpgradeTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/index/ConcurrentIndexUpgradeTest.java b/solr/core/src/test/org/apache/solr/index/ConcurrentIndexUpgradeTest.java
new file mode 100644
index 0000000..6c6ca30
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/index/ConcurrentIndexUpgradeTest.java
@@ -0,0 +1,281 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.index;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.FilterLeafReader;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SegmentReader;
+import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.client.solrj.request.schema.SchemaRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.client.solrj.response.schema.SchemaResponse;
+import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.schema.FieldType;
+import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.servlet.SolrDispatchFilter;
+import org.apache.solr.util.RefCounted;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Tests in-place docValues upgrade of an index while documents are being indexed concurrently.
+ */
+@LuceneTestCase.SuppressCodecs({"Memory", "Direct"})
+public class ConcurrentIndexUpgradeTest extends AbstractFullDistribZkTestBase {
+  private static final Logger log = LoggerFactory.getLogger(ConcurrentIndexUpgradeTest.class);
+
+  private static final String ID_FIELD = "id";
+  private static final String TEST_FIELD = "string_add_dv_later";
+  private static final int NUM_DOCS = 1000;
+
+  private AtomicBoolean runIndexer = new AtomicBoolean(true);
+
+  public ConcurrentIndexUpgradeTest() {
+    schemaString = "schema-docValues.xml";
+  }
+
+  @BeforeClass
+  public static void setupTest() {
+    System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
+  }
+
+  @AfterClass
+  public static void teardownTest() {
+    System.clearProperty("solr.directoryFactory");
+  }
+
+  @Override
+  protected String getCloudSolrConfig() {
+    return "solrconfig-pluggablemergepolicyfactory.xml";
+  }
+
+
+  @After
+  public void afterTest() throws Exception {
+    runIndexer.set(false);
+  }
+
+  @Test
+  public void testConcurrentIndexUpgrade() throws Exception {
+    String collectionName = "concurrentUpgrade_test";
+    CollectionAdminRequest.Create createCollectionRequest = CollectionAdminRequest
+        .createCollection(collectionName, "conf1", 2, 1);
+    CollectionAdminResponse response = createCollectionRequest.process(cloudClient);
+    assertEquals(0, response.getStatus());
+    assertTrue(response.isSuccess());
+    Thread.sleep(5000);
+
+    cloudClient.setDefaultCollection(collectionName);
+
+    Thread indexerThread = new Thread() {
+      @Override
+      public void run() {
+        try {
+          int docId = 0;
+          while (runIndexer.get()) {
+            UpdateRequest ureq = new UpdateRequest();
+            for (int i = 0; i < NUM_DOCS; i++) {
+              if (!runIndexer.get()) {
+                return;
+              }
+              SolrInputDocument doc = new SolrInputDocument();
+              doc.addField(ID_FIELD, docId);
+              doc.addField(TEST_FIELD, String.valueOf(docId));
+              ureq.add(doc);
+              docId++;
+            }
+            ureq.process(cloudClient, collectionName);
+            cloudClient.commit(collectionName);
+            Thread.sleep(200);
+          }
+        } catch (Exception e) {
+          log.warn("Can't index documents", e);
+        }
+      }
+    };
+
+    indexerThread.start();
+    // make sure we've indexed some documents
+    Thread.sleep(5000);
+
+    CollectionAdminRequest<CollectionAdminRequest.ColStatus> status = new CollectionAdminRequest.ColStatus()
+        .setCollectionName(collectionName)
+        .setWithFieldInfos(true)
+        .setWithSegments(true);
+    CollectionAdminResponse rsp = status.process(cloudClient);
+    List<String> nonCompliant = (List<String>)rsp.getResponse().findRecursive(collectionName, "schemaNonCompliant");
+    assertNotNull("nonCompliant missing: " + rsp, nonCompliant);
+    assertEquals("nonCompliant: " + nonCompliant, 1, nonCompliant.size());
+    assertEquals("nonCompliant: " + nonCompliant, "(NONE)", nonCompliant.get(0));
+
+    // set plugin configuration
+    Map<String, Object> pluginProps = new HashMap<>();
+    pluginProps.put(FieldType.CLASS_NAME, AddDocValuesMergePolicyFactory.class.getName());
+    // prevent merging
+    pluginProps.put(AddDocValuesMergePolicyFactory.NO_MERGE_PROP, true);
+    String propValue = Utils.toJSONString(pluginProps);
+    CollectionAdminRequest.ClusterProp clusterProp = new CollectionAdminRequest.ClusterProp()
+        .setPropertyName(PluggableMergePolicyFactory.MERGE_POLICY_PROP + collectionName)
+        .setPropertyValue(propValue);
+    clusterProp.process(cloudClient);
+
+    log.info("-- completed set cluster props");
+    Thread.sleep(5000);
+
+    // retrieve current schema
+    SchemaRequest schemaRequest = new SchemaRequest();
+    SchemaResponse schemaResponse = schemaRequest.process(cloudClient);
+    Map<String, Object> field = getSchemaField(TEST_FIELD, schemaResponse);
+    assertNotNull("missing " + TEST_FIELD + " field", field);
+    assertEquals("wrong flags: " + field, Boolean.FALSE, field.get("docValues"));
+
+    // update schema
+    field.put("docValues", true);
+    SchemaRequest.ReplaceField replaceRequest = new SchemaRequest.ReplaceField(field);
+    SchemaResponse.UpdateResponse replaceResponse = replaceRequest.process(cloudClient);
+
+    log.info("-- completed schema update");
+
+    // bounce the collection
+    Map<String, Long> urlToTimeBefore = new HashMap<>();
+    collectStartTimes(collectionName, cloudClient, urlToTimeBefore);
+    CollectionAdminRequest<CollectionAdminRequest.Reload> reload = new CollectionAdminRequest.Reload()
+        .setCollectionName(collectionName);
+    rsp = reload.process(cloudClient);
+
+    boolean reloaded = waitForReloads(collectionName, cloudClient, urlToTimeBefore);
+    assertTrue("could not reload collection in time", reloaded);
+
+    log.info("-- completed collection reload");
+
+    // verify that schema doesn't match the actual fields anymore
+    rsp = status.process(cloudClient);
+    log.info("--rsp: {}", rsp);
+    nonCompliant = (List<String>)rsp.getResponse().findRecursive(collectionName, "schemaNonCompliant");
+    assertNotNull("nonCompliant missing: " + rsp, nonCompliant);
+    assertEquals("nonCompliant: " + nonCompliant, 1, nonCompliant.size());
+    assertEquals("nonCompliant: " + nonCompliant, TEST_FIELD, nonCompliant.get(0));
+
+
+    // update plugin props to allow merging
+    pluginProps.put(AddDocValuesMergePolicyFactory.NO_MERGE_PROP, false);
+    propValue = Utils.toJSONString(pluginProps);
+    clusterProp = new CollectionAdminRequest.ClusterProp()
+        .setPropertyName(PluggableMergePolicyFactory.MERGE_POLICY_PROP + collectionName)
+        .setPropertyValue(propValue);
+    clusterProp.process(cloudClient);
+
+    log.info("-- completed set cluster props 2");
+
+    urlToTimeBefore = new HashMap<>();
+    collectStartTimes(collectionName, cloudClient, urlToTimeBefore);
+    rsp = reload.process(cloudClient);
+    reloaded = waitForReloads(collectionName, cloudClient, urlToTimeBefore);
+    assertTrue("could not reload collection in time", reloaded);
+
+    // verify that schema doesn't match the actual fields anymore
+    rsp = status.process(cloudClient);
+    nonCompliant = (List<String>)rsp.getResponse().findRecursive(collectionName, "schemaNonCompliant");
+    assertNotNull("nonCompliant missing: " + rsp, nonCompliant);
+    assertEquals("nonCompliant: " + nonCompliant, 1, nonCompliant.size());
+    assertEquals("nonCompliant: " + nonCompliant, TEST_FIELD, nonCompliant.get(0));
+
+
+    log.info("-- start optimize");
+    // request optimize to make sure all segments are rewritten
+    cloudClient.optimize(collectionName, true, true, 1);
+    cloudClient.commit();
+
+    log.info("-- completed optimize");
+
+    rsp = status.process(cloudClient);
+    nonCompliant = (List<String>)rsp.getResponse().findRecursive(collectionName, "schemaNonCompliant");
+    assertNotNull("nonCompliant missing: " + rsp, nonCompliant);
+    assertEquals("nonCompliant: " + nonCompliant, 1, nonCompliant.size());
+    assertEquals("nonCompliant: " + nonCompliant, "(NONE)", nonCompliant.get(0));
+    runIndexer.set(false);
+
+    // verify that all docs have docValues
+    for (JettySolrRunner jetty : jettys) {
+      CoreContainer cores = ((SolrDispatchFilter)jetty.getDispatchFilter().getFilter()).getCores();
+      for (SolrCore core : cores.getCores()) {
+        RefCounted<SolrIndexSearcher> searcherRef = core.getSearcher();
+        SolrIndexSearcher searcher = searcherRef.get();
+        try {
+          LeafReader reader = searcher.getSlowAtomicReader();
+          int maxDoc = reader.maxDoc();
+          SortedDocValues dvs = reader.getSortedDocValues(TEST_FIELD);
+          for (int i = 0; i < maxDoc; i++) {
+            Document d = reader.document(i);
+            // SortedDocValues is iterator-based in Lucene 7+: advance to the doc, then look up its ord
+            assertTrue(dvs.advanceExact(i));
+            BytesRef bytes = dvs.lookupOrd(dvs.ordValue());
+            assertNotNull(bytes);
+            String dvString = bytes.utf8ToString();
+            assertEquals(d.get("id"), dvString);
+          }
+          DirectoryReader directoryReader = searcher.getIndexReader();
+          for (LeafReaderContext leafCtx : directoryReader.leaves()) {
+            LeafReader leaf = leafCtx.reader();
+            while (leaf instanceof FilterLeafReader) {
+              leaf = ((FilterLeafReader)leaf).getDelegate();
+            }
+            assertTrue(leaf instanceof SegmentReader);
+            SegmentReader segmentReader = (SegmentReader)leaf;
+            String marker = segmentReader.getSegmentInfo().info.getDiagnostics().get(AddDocValuesMergePolicyFactory.DIAGNOSTICS_MARKER_PROP);
+            // new flush segments that are fully compliant won't have
+            // the marker because they were not wrapped
+            if (marker != null) {
+              assertEquals(AddDocValuesMergePolicyFactory.DEFAULT_MARKER, marker);
+            }
+          }
+        } finally {
+          searcherRef.decref();
+        }
+      }
+    }
+  }
+
+  private Map<String, Object> getSchemaField(String name, SchemaResponse schemaResponse) {
+    List<Map<String, Object>> fields = schemaResponse.getSchemaRepresentation().getFields();
+    for (Map<String, Object> field : fields) {
+      if (name.equals(field.get("name"))) {
+        return field;
+      }
+    }
+    return null;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37bbad35/solr/core/src/test/org/apache/solr/index/IndexUpgradeIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/index/IndexUpgradeIntegrationTest.java b/solr/core/src/test/org/apache/solr/index/IndexUpgradeIntegrationTest.java
new file mode 100644
index 0000000..a50a8d2
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/index/IndexUpgradeIntegrationTest.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.index;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.schema.SchemaRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.client.solrj.response.schema.SchemaResponse;
+import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.schema.FieldType;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Tests in-place docValues upgrade of an existing index via schema change, collection reload and optimize.
+ */
+public class IndexUpgradeIntegrationTest extends AbstractFullDistribZkTestBase {
+  private static final Logger log = LoggerFactory.getLogger(IndexUpgradeIntegrationTest.class);
+
+  private static final String ID_FIELD = "id";
+  private static final String TEST_FIELD = "string_add_dv_later";
+
+  public IndexUpgradeIntegrationTest() {
+    schemaString = "schema-docValues.xml";
+  }
+
+  @Override
+  protected String getCloudSolrConfig() {
+    return "solrconfig-pluggablemergepolicyfactory.xml";
+  }
+
+
+  @Test
+  public void testIndexUpgrade() throws Exception {
+    String collectionName = "indexUpgrade_test";
+    CollectionAdminRequest.Create createCollectionRequest = CollectionAdminRequest
+        .createCollection(collectionName, "conf1", 2, 1);
+    CollectionAdminResponse response = createCollectionRequest.process(cloudClient);
+    assertEquals(0, response.getStatus());
+    assertTrue(response.isSuccess());
+    Thread.sleep(5000);
+
+    // set plugin configuration
+    Map<String, Object> pluginProps = new HashMap<>();
+    pluginProps.put(FieldType.CLASS_NAME, AddDocValuesMergePolicyFactory.class.getName());
+    String propValue = Utils.toJSONString(pluginProps);
+    CollectionAdminRequest.ClusterProp clusterProp = new CollectionAdminRequest.ClusterProp()
+        .setPropertyName(PluggableMergePolicyFactory.MERGE_POLICY_PROP + collectionName)
+        .setPropertyValue(propValue);
+    clusterProp.process(cloudClient);
+
+    log.info("-- completed set cluster props");
+
+
+    cloudClient.setDefaultCollection(collectionName);
+    // this indexes still without DV because plugin props haven't taken effect yet
+
+    // create several segments
+    for (int i = 0; i < 1000; i++) {
+      SolrInputDocument doc = new SolrInputDocument();
+      doc.addField(ID_FIELD, i);
+      doc.addField(TEST_FIELD, String.valueOf(i));
+      cloudClient.add(doc);
+      if (i > 0 && i % 200 == 0) {
+        cloudClient.commit();
+      }
+    }
+    cloudClient.commit();
+
+    // retrieve current schema
+    SchemaRequest schemaRequest = new SchemaRequest();
+    SchemaResponse schemaResponse = schemaRequest.process(cloudClient);
+    Map<String, Object> field = getSchemaField(TEST_FIELD, schemaResponse);
+    assertNotNull("missing " + TEST_FIELD + " field", field);
+    assertEquals("wrong flags: " + field, Boolean.FALSE, field.get("docValues"));
+
+    // update schema
+    field.put("docValues", true);
+    SchemaRequest.ReplaceField replaceRequest = new SchemaRequest.ReplaceField(field);
+    SchemaResponse.UpdateResponse replaceResponse = replaceRequest.process(cloudClient);
+
+    log.info("-- completed schema update");
+
+    // bounce the collection
+    Map<String, Long> urlToTimeBefore = new HashMap<>();
+    collectStartTimes(collectionName, cloudClient, urlToTimeBefore);
+    CollectionAdminRequest.Reload reload = CollectionAdminRequest
+        .reloadCollection(collectionName);
+    reload.process(cloudClient);
+
+    boolean reloaded = waitForReloads(collectionName, cloudClient, urlToTimeBefore);
+    assertTrue("could not reload collection in time", reloaded);
+
+    log.info("-- completed collection reload");
+
+    // verify that schema doesn't match the actual fields anymore
+    CollectionAdminRequest<CollectionAdminRequest.ColStatus> status = new CollectionAdminRequest.ColStatus()
+        .setCollectionName(collectionName)
+        .setWithFieldInfos(true)
+        .setWithSegments(true);
+    CollectionAdminResponse rsp = status.process(cloudClient);
+    List<String> nonCompliant = (List<String>)rsp.getResponse().findRecursive(collectionName, "schemaNonCompliant");
+    assertNotNull("nonCompliant missing: " + rsp, nonCompliant);
+    assertEquals("nonCompliant: " + nonCompliant, 1, nonCompliant.size());
+    assertEquals("nonCompliant: " + nonCompliant, TEST_FIELD, nonCompliant.get(0));
+
+    log.info("-- start optimize");
+    // request optimize to make sure all segments are rewritten
+    cloudClient.optimize(collectionName, true, true, 1);
+    cloudClient.commit();
+
+    log.info("-- completed optimize");
+
+    rsp = status.process(cloudClient);
+    nonCompliant = (List<String>)rsp.getResponse().findRecursive(collectionName, "schemaNonCompliant");
+    assertNotNull("nonCompliant missing: " + rsp, nonCompliant);
+    assertEquals("nonCompliant: " + nonCompliant, 1, nonCompliant.size());
+    assertEquals("nonCompliant: " + nonCompliant, "(NONE)", nonCompliant.get(0));
+  }
+
+  private Map<String, Object> getSchemaField(String name, SchemaResponse schemaResponse) {
+    List<Map<String, Object>> fields = schemaResponse.getSchemaRepresentation().getFields();
+    for (Map<String, Object> field : fields) {
+      if (name.equals(field.get("name"))) {
+        return field;
+      }
+    }
+    return null;
+  }
+}
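
For reference, the merge-policy cluster property that this test sets through SolrJ can
also be set by hand with the Collections API. A minimal sketch, assuming that
PluggableMergePolicyFactory.MERGE_POLICY_PROP resolves to the
"ext.mergePolicyFactory.collections." prefix used by the stress tool later in this
series, and using this test's collection name:

    curl 'http://localhost:8983/solr/admin/collections?action=clusterprop&name=ext.mergePolicyFactory.collections.indexUpgrade_test&val={"class":"org.apache.solr.index.AddDocValuesMergePolicyFactory"}'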

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37bbad35/solr/core/src/test/org/apache/solr/index/PluggableMergePolicyTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/index/PluggableMergePolicyTest.java b/solr/core/src/test/org/apache/solr/index/PluggableMergePolicyTest.java
new file mode 100644
index 0000000..533ba53
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/index/PluggableMergePolicyTest.java
@@ -0,0 +1,207 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.index;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.MergeTrigger;
+import org.apache.lucene.index.SegmentCommitInfo;
+import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.index.TieredMergePolicy;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.schema.FieldType;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.servlet.SolrDispatchFilter;
+import org.apache.solr.util.RefCounted;
+import org.junit.Test;
+
+/**
+ * Tests that merge policies can be plugged in via cluster properties and are
+ * picked up by cores on collection reload.
+ */
+public class PluggableMergePolicyTest extends AbstractFullDistribZkTestBase {
+
+  public static class CustomMergePolicyFactory extends MergePolicyFactory {
+
+    public CustomMergePolicyFactory(SolrResourceLoader resourceLoader, MergePolicyFactoryArgs args, IndexSchema schema) {
+      super(resourceLoader, args, schema);
+    }
+
+    @Override
+    public MergePolicy getMergePolicy() {
+      return new CustomMergePolicy();
+    }
+  }
+
+  public static class CustomMergePolicy extends MergePolicy {
+
+    @Override
+    public MergeSpecification findMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext ctx) throws IOException {
+      return null;
+    }
+
+    @Override
+    public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentCommitInfo, Boolean> segmentsToMerge, MergeContext ctx) throws IOException {
+      return null;
+    }
+
+    @Override
+    public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, MergeContext ctx) throws IOException {
+      return null;
+    }
+  }
+
+  @Override
+  protected String getCloudSolrConfig() {
+    return "solrconfig-pluggablemergepolicyfactory.xml";
+  }
+
+  @Test
+  public void testConfigChanges() throws Exception {
+    String collectionName = "pluggableMPF_test";
+    CollectionAdminRequest.Create createCollectionRequest = CollectionAdminRequest
+        .createCollection(collectionName, "conf1", 2, 1);
+    CollectionAdminResponse response = createCollectionRequest.process(cloudClient);
+    assertEquals(0, response.getStatus());
+    assertTrue(response.isSuccess());
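+    // give the new collection a moment to become fully active on all nodes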
+    Thread.sleep(5000);
+
+    cloudClient.setDefaultCollection(collectionName);
+    Map<String, Object> pluginProps = new HashMap<>();
+    pluginProps.put(FieldType.CLASS_NAME, AddDocValuesMergePolicyFactory.class.getName());
+    String propValue = Utils.toJSONString(pluginProps);
+    CollectionAdminRequest.ClusterProp clusterProp = CollectionAdminRequest
+        .setClusterProperty(PluggableMergePolicyFactory.DEFAULT_POLICY_PROP, propValue);
+    clusterProp.process(cloudClient);
+
+    // no reload -> still using the default MP.
+    // get any core and verify its MergePolicy
+
+    SolrCore core = getAnyCore(collectionName);
+    if (core == null) {
+      fail("can't find any core belonging to the collection " + collectionName);
+    }
+    RefCounted<IndexWriter> writerRef = core.getUpdateHandler().getSolrCoreState().getIndexWriter(null);
+    try {
+      IndexWriter iw = writerRef.get();
+      MergePolicy mp = iw.getConfig().getMergePolicy();
+      assertEquals("default merge policy", TieredMergePolicy.class.getName(), mp.getClass().getName());
+    } finally {
+      writerRef.decref();
+    }
+
+    // reloaded cores will have start times more recent than these
+    Map<String, Long> urlToTimeBefore = new HashMap<>();
+    collectStartTimes(collectionName, cloudClient, urlToTimeBefore);
+
+    // reload the collection
+    CollectionAdminRequest.Reload reload = CollectionAdminRequest
+        .reloadCollection(collectionName);
+    reload.process(cloudClient);
+
+    boolean reloaded = waitForReloads(collectionName, cloudClient, urlToTimeBefore);
+    assertTrue("not reloaded in time", reloaded);
+
+    UpdateRequest ureq = new UpdateRequest();
+    for (int i = 100; i < 200; i++) {
+      SolrInputDocument doc = new SolrInputDocument();
+      doc.addField("id", i);
+      ureq.add(doc);
+    }
+    ureq.process(cloudClient);
+    cloudClient.commit();
+
+    core = getAnyCore(collectionName);
+    if (core == null) {
+      fail("can't find any reloaded core belonging to the collection " + collectionName);
+    }
+    writerRef = core.getUpdateHandler().getSolrCoreState().getIndexWriter(null);
+    try {
+      IndexWriter iw = writerRef.get();
+      MergePolicy mp = iw.getConfig().getMergePolicy();
+      assertEquals("custom merge policy", AddDocValuesMergePolicyFactory.AddDVMergePolicy.class.getName(), mp.getClass().getName());
+    } finally {
+      writerRef.decref();
+    }
+
+    // set collection-specific plugin
+    pluginProps.clear();
+    pluginProps.put(FieldType.CLASS_NAME, CustomMergePolicyFactory.class.getName());
+    propValue = Utils.toJSONString(pluginProps);
+    clusterProp = CollectionAdminRequest
+        .setClusterProperty(PluggableMergePolicyFactory.MERGE_POLICY_PROP + collectionName, propValue);
+    clusterProp.process(cloudClient);
+
+    // reload
+    urlToTimeBefore.clear();
+    collectStartTimes(collectionName, cloudClient, urlToTimeBefore);
+    reload.process(cloudClient);
+
+    reloaded = waitForReloads(collectionName, cloudClient, urlToTimeBefore);
+    assertTrue("not reloaded in time", reloaded);
+    core = getAnyCore(collectionName);
+    if (core == null) {
+      fail("can't find any reloaded core belonging to the collection " + collectionName);
+    }
+    writerRef = core.getUpdateHandler().getSolrCoreState().getIndexWriter(null);
+    try {
+      IndexWriter iw = writerRef.get();
+      MergePolicy mp = iw.getConfig().getMergePolicy();
+      assertEquals("custom merge policy", CustomMergePolicy.class.getName(), mp.getClass().getName());
+    } finally {
+      writerRef.decref();
+    }
+
+  }
+
+  private SolrCore getAnyCore(String collectionName) {
+    SolrCore core = null;
+    for (JettySolrRunner jetty : jettys) {
+      CoreContainer cores = jetty.getCoreContainer();
+      Iterator<SolrCore> solrCores = cores.getCores().iterator();
+      while (solrCores.hasNext()) {
+        SolrCore c = solrCores.next();
+        if (c.getCoreDescriptor().getCloudDescriptor() != null &&
+            c.getCoreDescriptor().getCloudDescriptor().getCollectionName().equals(collectionName)) {
+          core = c;
+          break;
+        }
+      }
+      if (core != null) {
+        break;
+      }
+    }
+    return core;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/37bbad35/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
index 7df3345..7ff4226 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
@@ -55,6 +55,7 @@ import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.client.solrj.request.CoreStatus;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
@@ -1014,6 +1015,59 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
     super.waitForRecoveriesToFinish(DEFAULT_COLLECTION, zkStateReader, verbose, true, timeoutSeconds);
   }
 
+  protected boolean waitForReloads(String collectionName, CloudSolrClient client, Map<String,Long> urlToTimeBefore) throws SolrServerException, IOException {
+    TimeOut timeout = new TimeOut(45, TimeUnit.SECONDS, TimeSource.NANO_TIME);
+
+    boolean allTimesAreCorrect = false;
+    while (! timeout.hasTimedOut()) {
+      Map<String,Long> urlToTimeAfter = new HashMap<>();
+      collectStartTimes(collectionName, client, urlToTimeAfter);
+
+      boolean retry = false;
+      Set<Entry<String,Long>> entries = urlToTimeBefore.entrySet();
+      for (Entry<String,Long> entry : entries) {
+        Long beforeTime = entry.getValue();
+        Long afterTime = urlToTimeAfter.get(entry.getKey());
+        assertNotNull(afterTime);
+        if (afterTime <= beforeTime) {
+          retry = true;
+          break;
+        }
+      }
+      if (!retry) {
+        allTimesAreCorrect = true;
+        break;
+      }
+      // avoid hammering the core admin API while waiting for the reload to complete
+      try {
+        Thread.sleep(250);
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();
+        break;
+      }
+    }
+    return allTimesAreCorrect;
+  }
+
+  protected void collectStartTimes(String collectionName, CloudSolrClient client, Map<String,Long> urlToTime)
+      throws SolrServerException, IOException {
+
+    ClusterState clusterState = cloudClient.getZkStateReader()
+        .getClusterState();
+    DocCollection collectionState = clusterState.getCollectionOrNull(collectionName);
+    if (collectionState != null) {
+      for (Slice shard : collectionState) {
+        for (Replica replica : shard) {
+          ZkCoreNodeProps coreProps = new ZkCoreNodeProps(replica);
+          CoreStatus coreStatus;
+          try (HttpSolrClient server = getHttpSolrClient(coreProps.getBaseUrl())) {
+            coreStatus = CoreAdminRequest.getCoreStatus(coreProps.getCoreName(), false, server);
+          }
+          long before = coreStatus.getCoreStartTime().getTime();
+          urlToTime.put(coreProps.getCoreUrl(), before);
+        }
+      }
+    } else {
+      throw new IllegalArgumentException("Could not find collection " + collectionName);
+    }
+  }
+
   protected void checkQueries() throws Exception {
 
     handle.put("_version_", SKIPVAL);


[3/4] lucene-solr:jira/solr-12259: Add Erick's test code and a pure Lucene test case.

Posted by ab...@apache.org.
Add Erick's test code and a pure Lucene test case.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d2c8e96d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d2c8e96d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d2c8e96d

Branch: refs/heads/jira/solr-12259
Commit: d2c8e96da38f0bcf6e85ecbe1cf59c2eb7c07792
Parents: d5a7b30
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Wed Dec 12 20:29:45 2018 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Wed Dec 12 20:29:45 2018 +0100

----------------------------------------------------------------------
 .../src/java/code/test/AddDVMPLuceneTest.java   | 220 ++++++++++++++++
 .../src/java/code/test/AddDVMPLuceneTest2.java  | 248 +++++++++++++++++++
 solr/core/src/java/code/test/AddDvStress.java   | 230 +++++++++++++++++
 .../src/java/code/test/ConfigChangerThread.java | 145 +++++++++++
 solr/core/src/java/code/test/IndexerThread.java |  88 +++++++
 solr/core/src/java/code/test/QueryThread.java   |  57 +++++
 6 files changed, 988 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d2c8e96d/solr/core/src/java/code/test/AddDVMPLuceneTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/code/test/AddDVMPLuceneTest.java b/solr/core/src/java/code/test/AddDVMPLuceneTest.java
new file mode 100644
index 0000000..964a49b
--- /dev/null
+++ b/solr/core/src/java/code/test/AddDVMPLuceneTest.java
@@ -0,0 +1,220 @@
+package code.test;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Function;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.SerialMergeScheduler;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TieredMergePolicy;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.solr.index.AddDocValuesMergePolicyFactory;
+import org.apache.solr.uninverting.UninvertingReader;
+
+/**
+ * Standalone stress test for AddDocValuesMergePolicyFactory.AddDVMergePolicy:
+ * several threads index while the uninverting mapping is flipped mid-run, and a
+ * query thread verifies that docValues stay consistent with the stored values.
+ */
+public class AddDVMPLuceneTest {
+
+  private static final String TEST_PATH = "/Users/ab/tmp/addvtest";
+  private static final Path testPath = Paths.get(TEST_PATH);
+  private static final String TEST_FIELD = "test";
+
+  private static final FieldType noDV = new FieldType();
+  private static final FieldType dv = new FieldType();
+
+  static {
+    noDV.setIndexOptions(IndexOptions.DOCS);
+    dv.setIndexOptions(IndexOptions.DOCS);
+  }
+
+  public static void main(String[] args) throws Exception {
+    AddDVMPLuceneTest test = new AddDVMPLuceneTest();
+    test.doTest();
+  }
+
+  static final String[] facets = new String[] {
+      "aa", "bb", "cc", "dd", "ee", "ff", "gg", "hh", "ii", "jj"
+  };
+  private final Map<String, UninvertingReader.Type> mapping = new ConcurrentHashMap<>();
+  private final AtomicBoolean stopRun = new AtomicBoolean(false);
+
+  private void cleanup() throws Exception {
+    FileUtils.deleteDirectory(testPath.toFile());
+  }
+
+  private void doTest() throws Exception {
+    cleanup();
+    Directory d = FSDirectory.open(testPath);
+    IndexWriterConfig cfg = new IndexWriterConfig(new WhitespaceAnalyzer());
+    cfg.setMergeScheduler(new SerialMergeScheduler());
+    cfg.setMergePolicy(new AddDocValuesMergePolicyFactory.AddDVMergePolicy(new TieredMergePolicy(), mapping::get, null, false, true));
+    cfg.setInfoStream(System.out);
+    ExtIndexWriter iw = new ExtIndexWriter(d, cfg, mapping);
+    for (int i = 0; i < 5; i++) {
+      IndexingThread indexingThread = new IndexingThread("t" + i, iw);
+      indexingThread.start();
+    }
+    QueryThread qt = new QueryThread(iw);
+    qt.start();
+    Thread.sleep(10000);
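+    // flip the mapping mid-run: new segments get real docValues, and the merge policy should rewrite older ones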
+    mapping.put(TEST_FIELD, UninvertingReader.Type.LEGACY_INTEGER);
+  }
+
+  private static final class ExtIndexWriter extends IndexWriter {
+
+    /**
+     * Constructs a new IndexWriter per the settings given in <code>conf</code>.
+     * If you want to make "live" changes to this writer instance, use
+     * {@link #getConfig()}.
+     *
+     * <p>
+     * <b>NOTE:</b> after this writer is created, the given configuration instance
+     * cannot be passed to another writer. If you intend to do so, you should
+     * {@link IndexWriterConfig#clone() clone} it beforehand.
+     *
+     * @param d    the index directory. The index is either created or appended to,
+     *             according to <code>conf.getOpenMode()</code>.
+     * @param conf the configuration settings according to which IndexWriter should
+     *             be initialized.
+     * @throws IOException if the directory cannot be read/written to, or if it does not
+     *                     exist and <code>conf.getOpenMode()</code> is
+     *                     <code>OpenMode.APPEND</code> or if there is any other low-level
+     *                     IO error
+     */
+    public ExtIndexWriter(Directory d, IndexWriterConfig conf, Map<String, UninvertingReader.Type> mapping) throws IOException {
+      super(d, conf);
+      this.mapping = mapping;
+    }
+
+    Map<String, UninvertingReader.Type> mapping;
+
+    public DirectoryReader getReader() throws IOException {
+      flush();
+      try {
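+        // opens the latest commit point; before the first commit this throws, hence the null fallback below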
+        DirectoryReader reader = DirectoryReader.open(getDirectory());
+        Map<String, UninvertingReader.Type> currentMapping = new HashMap<>(mapping);
+        return UninvertingReader.wrap(reader, currentMapping::get);
+      } catch (Throwable re) {
+        return null;
+      }
+    }
+  }
+
+  private class QueryThread extends Thread {
+    ExtIndexWriter writer;
+    QueryThread(ExtIndexWriter writer) {
+      this.writer = writer;
+    }
+
+    public void run() {
+      while (!stopRun.get() && !Thread.interrupted()) {
+        try {
+          Thread.sleep(5000);
+        } catch (InterruptedException e) {
+          return;
+        }
+        try {
+          DirectoryReader reader = writer.getReader();
+          if (reader == null) {
+            System.err.println("# no reader");
+            continue;
+          }
+          reader = UninvertingReader.wrap(reader, mapping::get);
+          for (LeafReaderContext ctx : reader.leaves()) {
+            checkLeaf(ctx);
+          }
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+      }
+    }
+
+    private void checkLeaf(LeafReaderContext ctx) throws IOException {
+      LeafReader reader = ctx.reader();
+      NumericDocValues dv = reader.getNumericDocValues(TEST_FIELD);
+      if (dv == null) {
+        // no docValues for this field yet (mapping not set, or segment not rewritten)
+        return;
+      }
+      int present = 0;
+      while (dv.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+        present++;
+        Document doc = reader.document(dv.docID());
+        long value = dv.longValue();
+        long stringValue = Long.parseLong(doc.get(TEST_FIELD + ".ck"));
+        if (value != stringValue) {
+          throw new IOException("value mismatch, base=" + ctx.docBase + ", doc=" + dv.docID() + ", string=" + stringValue + ", dv=" + value);
+        }
+      }
+      if (present < reader.numDocs()) {
+        throw new IOException("count mismatch: numDocs=" + reader.numDocs() + ", present=" + present + ", reader=" + reader);
+      }
+    }
+  }
+
+  private class IndexingThread extends Thread {
+    IndexWriter writer;
+    String threadId;
+    IndexingThread(String threadId, IndexWriter writer) {
+      this.threadId = threadId;
+      this.writer = writer;
+    }
+
+
+    public void run() {
+      int id = 0;
+      while (!stopRun.get() && !Thread.interrupted()) {
+        Document d = new Document();
+        Field f = new Field("id", id + "-" + threadId, TextField.TYPE_NOT_STORED);
+        d.add(f);
+        UninvertingReader.Type type = mapping.get(TEST_FIELD);
+        if (type != null) {
+          f = new NumericDocValuesField(TEST_FIELD, id);
+        } else {
+          f = new Field(TEST_FIELD, facets[id % 10], TextField.TYPE_NOT_STORED);
+        }
+        d.add(f);
+        // stored check field so the query thread can verify values (same pattern as in AddDVMPLuceneTest2)
+        d.add(new Field(TEST_FIELD + ".ck", String.valueOf(id), TextField.TYPE_STORED));
+        try {
+          writer.addDocument(d);
+          if (id > 0 && (id % 10000 == 0)) {
+            System.err.println("- added " + id);
+            // delete first 500
+//            for (int j = id - 10000; j < id - 10000 + 500; j++) {
+//              writer.deleteDocuments(new Term("id", j + "-" + threadId));
+//            }
+            writer.commit();
+            try {
+              Thread.sleep(50);
+            } catch (InterruptedException e) {
+              return;
+            }
+          }
+        } catch (IOException ioe) {
+          throw new RuntimeException("writer.addDocument", ioe);
+        }
+        id++;
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d2c8e96d/solr/core/src/java/code/test/AddDVMPLuceneTest2.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/code/test/AddDVMPLuceneTest2.java b/solr/core/src/java/code/test/AddDVMPLuceneTest2.java
new file mode 100644
index 0000000..d1fb836
--- /dev/null
+++ b/solr/core/src/java/code/test/AddDVMPLuceneTest2.java
@@ -0,0 +1,248 @@
+package code.test;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.CodecReader;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TieredMergePolicy;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
+import org.apache.solr.index.AddDocValuesMergePolicyFactory;
+import org.apache.solr.legacy.LegacyLongField;
+import org.apache.solr.uninverting.UninvertingReader;
+
+/**
+ * A refined variant of AddDVMPLuceneTest that exercises both numeric and sorted
+ * docValues fields, uses a stored ".ck" check field for verification, and mixes
+ * deletes into the indexing.
+ */
+public class AddDVMPLuceneTest2 {
+
+  private static final String TEST_PATH = "/Users/ab/tmp/addvtest";
+  private static final Path testPath = Paths.get(TEST_PATH);
+  private static final String TEST_NUM_FIELD = "testNum";
+  private static final String TEST_STR_FIELD = "testStr";
+
+  private static final FieldType noDV = new FieldType();
+  private static final FieldType dv = new FieldType();
+
+  static {
+    noDV.setIndexOptions(IndexOptions.DOCS);
+    dv.setIndexOptions(IndexOptions.DOCS);
+  }
+
+  public static void main(String[] args) throws Exception {
+    AddDVMPLuceneTest2 test = new AddDVMPLuceneTest2();
+    test.doTest();
+  }
+
+  static final String[] facets = new String[] {
+      "aa", "bb", "cc", "dd", "ee", "ff", "gg", "hh", "ii", "jj"
+  };
+  private final Map<String, UninvertingReader.Type> mapping = new ConcurrentHashMap<>();
+  private final AtomicBoolean stopRun = new AtomicBoolean(false);
+
+  private void cleanup() throws Exception {
+    FileUtils.deleteDirectory(testPath.toFile());
+  }
+
+  private void doTest() throws Exception {
+    cleanup();
+    Directory d = FSDirectory.open(testPath);
+    IndexWriterConfig cfg = new IndexWriterConfig(new WhitespaceAnalyzer());
+    TieredMergePolicy tmp = new TieredMergePolicy();
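+    // deliberately aggressive merge settings (tiny tiers, no compound files) so merges, and thus segment rewrites, happen quickly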
+    tmp.setMaxMergeAtOnce(2);
+    tmp.setSegmentsPerTier(2.0);
+    tmp.setNoCFSRatio(0.0);
+    cfg.setMergePolicy(new AddDocValuesMergePolicyFactory.AddDVMergePolicy(tmp, mapping::get, null, false, true));
+//    cfg.setMergePolicy(tmp);
+    //cfg.setInfoStream(System.out);
+    cfg.setMaxBufferedDocs(10);
+    ExtIndexWriter iw = new ExtIndexWriter(d, cfg);
+    IndexingThread indexingThread = new IndexingThread("t", iw);
+    indexingThread.start();
+    QueryThread qt = new QueryThread(iw);
+    qt.start();
+    Thread.sleep(5000);
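+    // flip both fields to docValues mid-run; the merge policy should rewrite older segments to match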
+    mapping.put(TEST_NUM_FIELD, UninvertingReader.Type.LEGACY_LONG);
+    mapping.put(TEST_STR_FIELD, UninvertingReader.Type.SORTED);
+  }
+
+  private static final class ExtIndexWriter extends IndexWriter {
+
+    /**
+     * Constructs a new IndexWriter per the settings given in <code>conf</code>.
+     * If you want to make "live" changes to this writer instance, use
+     * {@link #getConfig()}.
+     *
+     * <p>
+     * <b>NOTE:</b> after this writer is created, the given configuration instance
+     * cannot be passed to another writer. If you intend to do so, you should
+     * {@link IndexWriterConfig#clone() clone} it beforehand.
+     *
+     * @param d    the index directory. The index is either created or appended to,
+     *             according to <code>conf.getOpenMode()</code>.
+     * @param conf the configuration settings according to which IndexWriter should
+     *             be initialized.
+     * @throws IOException if the directory cannot be read/written to, or if it does not
+     *                     exist and <code>conf.getOpenMode()</code> is
+     *                     <code>OpenMode.APPEND</code> or if there is any other low-level
+     *                     IO error
+     */
+    public ExtIndexWriter(Directory d, IndexWriterConfig conf) throws IOException {
+      super(d, conf);
+    }
+
+    public DirectoryReader getReader() throws IOException {
+      flush();
+      try {
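+        // opens the latest commit point; before the first commit this throws, hence the null fallback below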
+        DirectoryReader reader = DirectoryReader.open(getDirectory());
+        return reader;
+      } catch (Throwable re) {
+        return null;
+      }
+    }
+  }
+
+  private class QueryThread extends Thread {
+    ExtIndexWriter writer;
+    QueryThread(ExtIndexWriter writer) {
+      this.writer = writer;
+    }
+
+    public void run() {
+      while (!stopRun.get() && !Thread.interrupted()) {
+        try {
+          Thread.sleep(5000);
+        } catch (InterruptedException e) {
+          return;
+        }
+        try {
+          DirectoryReader reader = writer.getReader();
+          if (reader == null) {
+            System.err.println("# no reader");
+            continue;
+          }
+          reader = UninvertingReader.wrap(reader, mapping::get);
+          for (LeafReaderContext ctx : reader.leaves()) {
+            checkLeaf(ctx);
+          }
+        } catch (IOException e) {
+          stopRun.set(true);
+          throw new RuntimeException(e);
+        }
+      }
+    }
+
+    private void checkLeaf(LeafReaderContext ctx) throws IOException {
+      System.err.println(" -- checking " + ctx.reader());
+      LeafReader reader = ctx.reader();
+      if (mapping.get(TEST_NUM_FIELD) == null) {
+        return;
+      }
+      NumericDocValues dv = reader.getNumericDocValues(TEST_NUM_FIELD);
+      if (dv == null) {
+        // once the mapping is set, the wrapped reader must supply (possibly uninverted) docValues
+        throw new IOException("no docValues for " + TEST_NUM_FIELD + " in " + reader);
+      }
+      int present = 0;
+      while (dv.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+        Document doc = reader.document(dv.docID());
+        long value = dv.longValue();
+        long stringValue = Long.parseLong(doc.get(TEST_NUM_FIELD + ".ck"));
+        if (value != stringValue) {
+          throw new IOException("value mismatch, base=" + ctx.docBase + ", doc=" + dv.docID() + ", string=" + stringValue + ", dv=" + value);
+        }
+        present++;
+      }
+      if (present < reader.numDocs()) {
+        LeafReader r = UninvertingReader.unwrap(reader);
+        String dvStats = "?";
+        if (r instanceof CodecReader) {
+          dvStats = UninvertingReader.getDVStats((CodecReader)r, reader.getFieldInfos().fieldInfo(TEST_NUM_FIELD)).toString();
+        }
+        System.err.println("count mismatch: numDocs=" + reader.numDocs() + ", present=" + present + ", reader=" + reader
+            + "\ndvStats=" + dvStats);
+        System.exit(-1);
+//        throw new IOException("count mismatch: numDocs=" + reader.numDocs() + ", present=" + present + ", reader=" + reader
+//            + "\ndvStats=" + dvStats);
+      }
+    }
+  }
+
+  private class IndexingThread extends Thread {
+    IndexWriter writer;
+    String threadId;
+    IndexingThread(String threadId, IndexWriter writer) {
+      this.threadId = threadId;
+      this.writer = writer;
+    }
+
+
+    public void run() {
+      int id = 0;
+      BytesRefBuilder builder = new BytesRefBuilder();
+      while (!stopRun.get() && !Thread.interrupted()) {
+        Document d = new Document();
+        Field f = new Field("id", id + "-" + threadId, TextField.TYPE_STORED);
+        d.add(f);
+        UninvertingReader.Type type = mapping.get(TEST_NUM_FIELD);
+        if (type != null) {
+          f = new NumericDocValuesField(TEST_NUM_FIELD, id);
+        } else {
+          f = new LegacyLongField(TEST_NUM_FIELD, id, LegacyLongField.TYPE_STORED);
+        }
+        d.add(f);
+        d.add(new Field(TEST_NUM_FIELD + ".ck", String.valueOf(id), TextField.TYPE_STORED));
+        type = mapping.get(TEST_STR_FIELD);
+        if (type != null) {
+          f = new SortedDocValuesField(TEST_STR_FIELD, new BytesRef(facets[id % 10]));
+        } else {
+          f = new Field(TEST_STR_FIELD, facets[id % 10], TextField.TYPE_STORED);
+        }
+        d.add(f);
+        try {
+          writer.addDocument(d);
+          if (id > 0 && (id % 20 == 0)) {
+            System.err.println("- added " + id);
+            // delete first 500
+            for (int j = id - 20; j < id - 20 + 10; j++) {
+              writer.deleteDocuments(new Term("id", j + "-" + threadId));
+            }
+            writer.commit();
+            try {
+              Thread.sleep(50);
+            } catch (InterruptedException e) {
+              return;
+            }
+          }
+        } catch (IOException ioe) {
+          stopRun.set(true);
+          throw new RuntimeException("writer.addDocument", ioe);
+        }
+        id++;
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d2c8e96d/solr/core/src/java/code/test/AddDvStress.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/code/test/AddDvStress.java b/solr/core/src/java/code/test/AddDvStress.java
new file mode 100644
index 0000000..e1430df
--- /dev/null
+++ b/solr/core/src/java/code/test/AddDvStress.java
@@ -0,0 +1,230 @@
+package code.test;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.Random;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.zookeeper.KeeperException;
+
+public class AddDvStress {
+
+  static AtomicBoolean stopRun = new AtomicBoolean(false);
+  static AtomicBoolean endCycle = new AtomicBoolean(false);
+  static AtomicBoolean badState = new AtomicBoolean(false);
+//  static AtomicBoolean pauseIndexing = new AtomicBoolean(false);
+//  static AtomicBoolean indexingPaused = new AtomicBoolean(false);
+  static AtomicInteger querySleepInterval = new AtomicInteger(100);
+  static AtomicInteger docCount = new AtomicInteger(0);
+
+  //static int DOCS_PER_PASS = 10_000;
+
+  // TODO: try pausing indexing while changing the merge policy (instead of not pausing) and see if the problem still occurs
+  static String COLLECTION = "eoe";
+  static String FIELD = "mystring";
+  static String CONFIGSET_PATH_WITHOUT = "/Users/ab/tmp/without/conf";
+  //static String CONFIGSET_PATH_WITH = "/Users/Erick/with/conf";
+  static String CONFIGSET_NAME = "without";
+  static String ZK = "localhost:9983";
+  static AtomicInteger docsPerPass = new AtomicInteger();
+  static AtomicInteger globalId = new AtomicInteger();
+  static AtomicInteger batchSize = new AtomicInteger();
+  static AtomicInteger trigger = new AtomicInteger();
+  static AtomicBoolean configsChanged = new AtomicBoolean(false);
+
+
+  Random rand = new Random();
+  CloudSolrClient client;
+
+  public static void main(String[] args) {
+    try {
+      AddDvStress dvs = new AddDvStress();
+      dvs.doTests();
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+  }
+
+  void doTests() throws InterruptedException, SolrServerException, KeeperException, IOException {
+    try (CloudSolrClient csc = new CloudSolrClient.Builder(Collections.singletonList(ZK), Optional.empty()).build()) {
+      client = csc;
+      client.connect();
+      int cycle = 0;
+      while (stopRun.get() == false) {
+        System.out.println("\n\n\n\nStarting cycle: " + cycle++);
+        COLLECTION = "eoe-" + cycle;
+        client.setDefaultCollection(COLLECTION);
+        docCount.set(0);
+        docsPerPass.set(rand.nextInt(100_000) + 10_000);
+        batchSize.set(rand.nextInt(1_000) + 100);
+        trigger.set(rand.nextInt(docsPerPass.get()) + AddDvStress.docsPerPass.get() / 2);
+        System.out.println("Starting run with docsPerPass =  " + docsPerPass.get() + " batchSize = " + batchSize.get() + " trigger for changing config = " + trigger.get());
+        runTest();
+      }
+    }
+  }
+
+  // Things to wonder about
+  // Somehow curl to update isn't doing the right thing or there's a race condition here?
+  // Can you generate the problem if you upload a full configset?
+  void runTest() throws IOException, SolrServerException, InterruptedException, KeeperException {
+    Thread indexer = null;
+    Thread queryer = null;
+    endCycle.set(false);
+    teardown(); // Don't teardown() at the end. In case of abnormal termination, we want to see what's there.
+    setup();
+    if (stopRun.get() == false) {
+      Thread changer = new Thread(new ConfigChangerThread(client));
+      changer.start();
+
+      indexer = new Thread(new IndexerThread(client));
+      indexer.start();
+
+      queryer = new Thread(new QueryThread(client));
+      queryer.start();
+
+      // Now sleep for 5 seconds to let the query thread run for a bit more
+      System.out.println("Waiting 5 seconds for query thread to have a final go");
+      querySleepInterval.set(1000);
+      Thread.sleep(5000);
+
+//      //
+//      if (badState.get()) {
+//        System.out.println("Trying an optimize to 1 segment");
+//        client.optimize(COLLECTION, true, true, 1);
+//        System.out.println("Optimize done");
+//        Thread.sleep(2000); // give the query process time for another go-round.
+//      }
+      indexer.join();
+      endCycle.set(true);
+      queryer.join();
+      changer.join(); // should be totally immediate.
+    }
+  }
+
+  static String COLL_ADMIN = "http://localhost:8983/solr/admin/collections";
+  static final String NAME_PREFIX = "ext.mergePolicyFactory.collections.";
+  static final String MP_VAL = "{\"class\":\"org.apache.solr.index.AddDocValuesMergePolicyFactory\"}";
+  static final String CLUSTERPROPPATH = "/clusterprops.json";
+
+  void setup() throws IOException, SolrServerException, InterruptedException {
+    System.out.println("Uploding the configset and creating the collection");
+    // upload the configset
+    client.getZkStateReader().getConfigManager().uploadConfigDir(Paths.get(CONFIGSET_PATH_WITHOUT), CONFIGSET_NAME);
+
+    // create the collection
+    CollectionAdminRequest.Create createCollectionRequest = CollectionAdminRequest
+        .createCollection(COLLECTION, CONFIGSET_NAME, 1, 1);
+    CollectionAdminResponse resp = createCollectionRequest.process(client);
+    if (resp.getStatus() != 0 || resp.isSuccess() == false) {
+      System.out.println("Failed to create collection");
+      stopRun.set(true);
+    }
+
+    // Make sure the collection is active
+    for (int idx = 0; idx < 25; idx++) {
+      try {
+        SolrInputDocument doc = new SolrInputDocument();
+        doc.addField("id", 1);
+        doc.addField(AddDvStress.FIELD, "aa");
+        client.add(doc);
+        client.commit(false, false);
+        break; // the collection is accepting updates
+      } catch (Exception e) {
+        Thread.sleep(1000);
+      }
+    }
+  }
+
+  /** Waits until the given node is gone; returns true if it disappeared in time. */
+  boolean waitForZkNodeGone(SolrZkClient zkClient, String path) throws InterruptedException {
+    boolean gone = false;
+    for (int idx = 0; idx < 10 && gone == false; ++idx) {
+      try {
+        if (zkClient.exists(path, true)) {
+          Thread.sleep(1000);
+        } else {
+          gone = true;
+        }
+      } catch (Exception e) {
+        Thread.sleep(1000);
+      }
+    }
+    if (gone == false) {
+      System.out.println("Found node " + path + " when it shouldn't be there!");
+      stopRun.set(true);
+    }
+    return gone;
+  }
+  void teardown() throws IOException, SolrServerException, KeeperException, InterruptedException {
+    System.out.println("Tearing down anything remaining from prior run");
+    // delete the collection
+    SolrZkClient zkClient = client.getZkStateReader().getZkClient();
+    if (zkClient.exists("/collections/" + COLLECTION, true)) {
+      CollectionAdminRequest.Delete deleteCollectionRequest = CollectionAdminRequest
+          .deleteCollection(COLLECTION);
+      deleteCollectionRequest.process(client);
+    }
+
+    if (zkNodeExists(zkClient, "/collections/" + COLLECTION) == false) return;
+
+    // delete the configset
+    if (zkClient.exists("/configs/" + CONFIGSET_NAME, true)) {
+      removeConfigSet();
+    }
+
+    if (zkNodeExists(zkClient, "/configs/" + CONFIGSET_NAME) == false) return;
+
+    // ensure the cluster property is nuked
+    if (zkClient.exists(CLUSTERPROPPATH, true)) {
+      byte[] bytes = "{}".getBytes(StandardCharsets.UTF_8);
+      zkClient.setData(CLUSTERPROPPATH, bytes, false);
+      boolean OK =  false;
+      for (int idx = 0; idx < 10 && OK == false; idx++) {
+        String props = new String(zkClient.getData(CLUSTERPROPPATH, null, null, false), StandardCharsets.UTF_8);
+        if (props.equals("{}")) {
+          OK = true;
+        } else {
+          Thread.sleep(1000);
+        }
+      }
+      if (OK == false) {
+        System.out.println("found cluster props with content");
+        stopRun.set(true);
+      }
+    }
+  }
+
+  void removeConfigSet() throws KeeperException, InterruptedException {
+    SolrZkClient zkClient = client.getZkStateReader().getZkClient();
+    List<String> descendents = new ArrayList<>();
+    descendents.add("/configs/" + CONFIGSET_NAME);
+
+    getConfigList(zkClient, descendents, "/configs/" + CONFIGSET_NAME);
+
+    for (int idx = descendents.size() - 1; idx >= 0; idx--) {
+      zkClient.delete(descendents.get(idx), -1, true);
+    }
+  }
+
+  void getConfigList(SolrZkClient zkClient, List<String> descendants, String path) throws KeeperException, InterruptedException {
+    List<String> kids = zkClient.getChildren(path, null, true);
+    // First add all the kids at this level
+    for (String kid : kids) {
+      descendants.add(path + "/" + kid);
+    }
+    // now get their children
+    for (String kid : kids) {
+      getConfigList(zkClient, descendants, path + "/" + kid);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d2c8e96d/solr/core/src/java/code/test/ConfigChangerThread.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/code/test/ConfigChangerThread.java b/solr/core/src/java/code/test/ConfigChangerThread.java
new file mode 100644
index 0000000..f019c44
--- /dev/null
+++ b/solr/core/src/java/code/test/ConfigChangerThread.java
@@ -0,0 +1,145 @@
+package code.test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.http.HttpResponse;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.entity.UrlEncodedFormEntity;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.message.BasicNameValuePair;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.schema.SchemaRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.client.solrj.response.schema.SchemaResponse;
+
+public class ConfigChangerThread implements Runnable {
+  final CloudSolrClient client;
+
+  final Random rand = new Random();
+
+  ConfigChangerThread(CloudSolrClient client) {
+    this.client = client;
+  }
+  @Override
+  public void run() {
+    // Change the config once the indexing thread is past the trigger point
+    // (between halfway through the first pass and halfway through the second)
+
+    AddDvStress.configsChanged.set(false);
+    while (AddDvStress.stopRun.get() == false && AddDvStress.docCount.get() < AddDvStress.trigger.get()) {
+      try {
+        Thread.sleep(100);
+      } catch (InterruptedException e) {
+        System.out.println("Interrupted while waiting to change the configuration");
+        AddDvStress.stopRun.set(true);
+        return;
+      }
+    }
+    System.out.println("Changing the config now, doc count : " + AddDvStress.docCount.get());
+    try {
+//      AddDvStress.pauseIndexing.set(true);
+//      while (AddDvStress.stopRun.get() == false && AddDvStress.indexingPaused.get() == false) {
+//        Thread.sleep(100);
+//      }
+      updateCollectionWithAPI();
+//      updateCollectionWithConfigset();
+//      AddDvStress.pauseIndexing.set(false);
+      System.out.println("successfully changed the config, doc count : " + AddDvStress.docCount.get());
+    //} catch (InterruptedException | IOException| SolrServerException e) {
+    } catch (IOException | SolrServerException e) {
+      e.printStackTrace();
+      AddDvStress.stopRun.set(true);
+    }
+    System.out.println("Done modifying configs and MP, leaving thread");
+    AddDvStress.configsChanged.set(true);
+  }
+
+//  void updateCollectionWithConfigset() throws IOException, SolrServerException {
+//    client.getZkStateReader().getConfigManager().uploadConfigDir(Paths.get(AddDvStress.CONFIGSET_PATH_WITH), CONFIGSET_NAME);
+//    System.out.println("Reloading collection with a configset " + AddDvStress.CONFIGSET_PATH_WITH);
+//    final CollectionAdminRequest.Reload reloadCollectionRequest = new CollectionAdminRequest.Reload()
+//        .setCollectionName(AddDvStress.COLLECTION);
+//    CollectionAdminResponse resp = reloadCollectionRequest.process(client);
+//    if (resp.getStatus() != 0 || resp.isSuccess() == false) {
+//      System.out.println("Failed to reload collection");
+//      AddDvStress.stopRun.set(true);
+//    }
+//
+//
+//  }
+  void updateCollectionWithAPI() throws IOException, SolrServerException {
+    System.out.println("Changing the schema");
+    // First update the MP, equivalent to:
+    //   curl 'http://localhost:8983/solr/admin/collections?action=clusterprop&name=ext.mergePolicyFactory.collections.test_dv&val={"class":"org.apache.solr.index.AddDocValuesMergePolicyFactory"}'
+    System.out.println("Updating the MP");
+    HttpClient httpclient = HttpClients.createDefault();
+    HttpPost httppost = new HttpPost(AddDvStress.COLL_ADMIN);
+
+    // Request parameters and other properties.
+    List<NameValuePair> params = new ArrayList<>(3);
+    params.add(new BasicNameValuePair("action", "clusterprop"));
+    params.add(new BasicNameValuePair("name", AddDvStress.NAME_PREFIX + AddDvStress.COLLECTION));
+    params.add(new BasicNameValuePair("val", AddDvStress.MP_VAL));
+    httppost.setEntity(new UrlEncodedFormEntity(params, "UTF-8"));
+
+    // Execute and get the response.
+    HttpResponse response = httpclient.execute(httppost);
+    if (response.getStatusLine().getStatusCode() != 200) {
+      System.out.println("Changing the MP failed");
+      AddDvStress.stopRun.set(true);
+    }
+
+    // First reload of the collection, to pick up the new merge policy
+    System.out.println("Reloading collection (1st time)");
+    final CollectionAdminRequest.Reload reloadCollectionRequest = CollectionAdminRequest
+        .reloadCollection(AddDvStress.COLLECTION);
+    CollectionAdminResponse resp = reloadCollectionRequest.process(client);
+    if (resp.getStatus() != 0 || resp.isSuccess() == false) {
+      System.out.println("Failed to reload the collection");
+      AddDvStress.stopRun.set(true);
+    }
+    try {
+      Thread.sleep(5_000);
+    } catch (InterruptedException e) {
+      System.out.println("Failed to change the configuration");
+      AddDvStress.stopRun.set(true);
+    }
+    // then update the schema
+    Map<String, Object> fieldAttributes = new LinkedHashMap<>();
+    fieldAttributes.put("name", AddDvStress.FIELD);
+    fieldAttributes.put("type", "string");
+    fieldAttributes.put("stored", false);
+    fieldAttributes.put("indexed", true);
+    fieldAttributes.put("required", false);
+    fieldAttributes.put("docValues", true);
+    SchemaRequest.ReplaceField replaceFieldRequest = new SchemaRequest.ReplaceField(fieldAttributes);
+    SchemaResponse.UpdateResponse replaceFieldResponse = replaceFieldRequest.process(client);
+    if (replaceFieldResponse.getStatus() != 0 || replaceFieldResponse.getResponse().get("errors") != null) {
+      System.out.println("Could not modify schema");
+      AddDvStress.stopRun.set(true);
+    }
+
+    // Second reload of the collection, to pick up the schema change
+    System.out.println("Reloading collection (2nd time)");
+    resp = reloadCollectionRequest.process(client);
+    if (resp.getStatus() != 0 || resp.isSuccess() == false) {
+      System.out.println("Failed to reload the collection");
+      AddDvStress.stopRun.set(true);
+    }
+    
+    System.out.println("Done modifying configs and MP, leaving thread");
+  }
+}
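
The ReplaceField call above has a direct Schema API equivalent. A hedged sketch, with
the collection name assumed (the stress tool creates collections named eoe-<cycle>):

    curl -X POST -H 'Content-type:application/json' --data-binary '{
      "replace-field": {
        "name": "mystring",
        "type": "string",
        "stored": false,
        "indexed": true,
        "required": false,
        "docValues": true
      }
    }' 'http://localhost:8983/solr/eoe-1/schema'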

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d2c8e96d/solr/core/src/java/code/test/IndexerThread.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/code/test/IndexerThread.java b/solr/core/src/java/code/test/IndexerThread.java
new file mode 100644
index 0000000..863c857
--- /dev/null
+++ b/solr/core/src/java/code/test/IndexerThread.java
@@ -0,0 +1,88 @@
+package code.test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.common.SolrInputDocument;
+
+public class IndexerThread implements Runnable {
+
+  final CloudSolrClient client;
+
+  IndexerThread(CloudSolrClient client) {
+    this.client = client;
+  }
+
+  static String[] facets = {"aa", "bb", "cc", "dd", "ee", "ff", "gg", "hh", "ii", "jj"};
+//  static int BATCH_SIZE = 100;
+
+  Random rand = new Random();
+
+
+  @Override
+  public void run() {
+    try {
+      System.out.println("Indexing batch");
+      addDocs(); // get the first set of docs in place, presumably without docValues
+      if (AddDvStress.stopRun.get()) {
+        return;
+      }
+//      System.out.println("Starting second cycle");
+//      addDocs();
+      System.out.println("Indexing done");
+    } catch (SolrServerException | IOException | InterruptedException e) {
+      e.printStackTrace();
+      AddDvStress.stopRun.set(true);
+      return;
+    }
+  }
+
+  boolean doCommit(boolean waitFlush, boolean waitSearcher) throws InterruptedException {
+    for (int idx = 0; idx < 10; ++idx) {
+      try {
+        client.commit(waitFlush, waitSearcher);
+        return true;
+      } catch (Exception e) {
+        Thread.sleep(1000);
+      }
+    }
+    AddDvStress.stopRun.set(true);
+    return false;
+  }
+
+  void addDocs() throws IOException, SolrServerException, InterruptedException {
+    List<SolrInputDocument> docs = new ArrayList<>();
+    int finalCount = 0;
+    for (int idx = 0; finalCount < AddDvStress.docsPerPass.get() && AddDvStress.stopRun.get() == false; idx++) {
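+      // finalCount only starts advancing once the config has changed, so a full pass of docs is indexed after the change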
+      if (AddDvStress.configsChanged.get()) {
+        finalCount++;
+      }
+      SolrInputDocument doc = new SolrInputDocument();
+//      doc.addField("id", rand.nextInt(AddDvStress.docsPerPass.get()));
+      doc.addField("id", AddDvStress.globalId.getAndIncrement());
+      doc.addField(AddDvStress.FIELD, facets[rand.nextInt(facets.length)]);
+      docs.add(doc);
+      if ((idx % AddDvStress.batchSize.get()) == 0) {
+        client.add(docs);
+        AddDvStress.docCount.addAndGet(docs.size()); // atomic now; only the indexer thread updates this anyway
+        if (doCommit(false, false) == false) {
+          return;
+        }
+        docs.clear();
+//        while (AddDvStress.stopRun.get() == false && AddDvStress.pauseIndexing.get()) {
+//          AddDvStress.indexingPaused.set(true);
+//          Thread.sleep(100);
+//        }
+//        AddDvStress.indexingPaused.set(false);
+      }
+    }
+    if (docs.size() > 0) {
+      doCommit(true, true);
+      AddDvStress.docCount.addAndGet(docs.size()); // atomic now; only the indexer thread updates this anyway
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d2c8e96d/solr/core/src/java/code/test/QueryThread.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/code/test/QueryThread.java b/solr/core/src/java/code/test/QueryThread.java
new file mode 100644
index 0000000..871dee9
--- /dev/null
+++ b/solr/core/src/java/code/test/QueryThread.java
@@ -0,0 +1,57 @@
+package code.test;
+
+import java.io.IOException;
+
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.response.FacetField;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.params.ModifiableSolrParams;
+
+public class QueryThread implements Runnable {
+  final CloudSolrClient client;
+
+  QueryThread(CloudSolrClient client) {
+    this.client = client;
+  }
+
+  @Override
+  public void run() {
+    while (AddDvStress.stopRun.get() == false && AddDvStress.endCycle.get() == false) {
+      ModifiableSolrParams params = new ModifiableSolrParams();
+      params.add("q", "*:*");
+      params.add("facet", "true");
+      params.add("facet.field", AddDvStress.FIELD);
+      params.add("facet.limit", "-1");
+      int total = 0;
+      long numFound = 0;
+      try {
+        Thread.sleep(AddDvStress.querySleepInterval.get());
+        QueryResponse resp = client.query(params);
+        FacetField ff = resp.getFacetField(AddDvStress.FIELD);
+
+        for (FacetField.Count count : ff.getValues()) {
+          total += count.getCount();
+        }
+        numFound = resp.getResults().getNumFound();
+        if (numFound != total) {
+          System.out.println("Found: " + total + " expected: " + numFound);
+          AddDvStress.badState.set(true);
+          AddDvStress.querySleepInterval.set(1000);
+        } else {
+          AddDvStress.badState.set(false);
+          AddDvStress.querySleepInterval.set(100);
+        }
+      } catch (IOException | SolrServerException | InterruptedException e) {
+        e.printStackTrace();
+        AddDvStress.stopRun.set(true);
+      }
+      if (AddDvStress.endCycle.get()) {
+        System.out.println("Final query, found: " + total + " expected: " + numFound + " docCount: " + AddDvStress.docCount.get());
+        if (total != numFound) {
+          AddDvStress.stopRun.set(true);
+        }
+      }
+    }
+  }
+}


[2/4] lucene-solr:jira/solr-12259: More fixes, still broken.

Posted by ab...@apache.org.
More fixes, still broken.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/d5a7b307
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/d5a7b307
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/d5a7b307

Branch: refs/heads/jira/solr-12259
Commit: d5a7b3071fdbd2db493c77aef15f5882aea1a270
Parents: 37bbad3
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Wed Dec 12 20:29:18 2018 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Wed Dec 12 20:29:18 2018 +0100

----------------------------------------------------------------------
 .../org/apache/lucene/index/SegmentReader.java  |   2 +-
 .../index/AddDocValuesMergePolicyFactory.java   |  47 +++--
 .../solr/uninverting/UninvertingReader.java     | 207 ++++++++++++++-----
 .../solr/cloud/BasicDistributedZkTest.java      |   2 +-
 .../index/WrapperMergePolicyFactoryTest.java    |   2 +-
 .../cloud/AbstractFullDistribZkTestBase.java    |   2 +-
 6 files changed, 191 insertions(+), 71 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a7b307/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
index b368b96..535e21e 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentReader.java
@@ -72,7 +72,7 @@ public final class SegmentReader extends CodecReader {
    * @throws CorruptIndexException if the index is corrupt
    * @throws IOException if there is a low-level IO error
    */
-  SegmentReader(SegmentCommitInfo si, int createdVersionMajor, IOContext context) throws IOException {
+  public SegmentReader(SegmentCommitInfo si, int createdVersionMajor, IOContext context) throws IOException {
     this.si = si.clone();
     this.originalSi = si;
     this.metaData = new LeafMetaData(createdVersionMajor, si.info.getMinVersion(), si.info.getIndexSort());
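
Note: this constructor was package-private; making it public is what lets the new merge policy code in org.apache.solr.index open a short-lived reader for a single segment. The shouldRewrite() change later in this patch uses it exactly like this:

    try (SegmentReader reader = new SegmentReader(info, Version.LUCENE_8_0_0.major, IOContext.DEFAULT)) {
      // inspect reader.getFieldInfos() and per-field docValues without opening the whole index
    }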

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a7b307/solr/core/src/java/org/apache/solr/index/AddDocValuesMergePolicyFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/index/AddDocValuesMergePolicyFactory.java b/solr/core/src/java/org/apache/solr/index/AddDocValuesMergePolicyFactory.java
index 9cfee8c..e96535d 100644
--- a/solr/core/src/java/org/apache/solr/index/AddDocValuesMergePolicyFactory.java
+++ b/solr/core/src/java/org/apache/solr/index/AddDocValuesMergePolicyFactory.java
@@ -198,7 +198,7 @@ public class AddDocValuesMergePolicyFactory extends WrapperMergePolicyFactory {
             if (sb.length() > 0) {
               sb.append(' ');
             }
-            sb.append(info.toString() + "(" + source + "," + shouldRewrite + "," + clazz + ")");
+            sb.append(info.info.name + "(" + source + "," + shouldRewrite + "," + clazz + ")");
           }
         }
         if (needWrapping > 0) {
@@ -233,20 +233,23 @@ public class AddDocValuesMergePolicyFactory extends WrapperMergePolicyFactory {
       StringBuilder b = new StringBuilder();
       final int numSegments = oneMerge.segments.size();
       for(int i=0;i<numSegments;i++) {
-        if (i > 0) {
-          b.append('\n');
-        }
-        b.append(oneMerge.segments.get(i).toString());
-        b.append('#');
+        b.append("\n* ");
+        b.append(oneMerge.segments.get(i).info.name);
+        b.append(":");
+        b.append("\n\tsource: ");
         Map<String, String> diag = oneMerge.segments.get(i).info.getDiagnostics();
         b.append(diag.get("source"));
         if (diag.get("class") != null) {
-          b.append('#');
+          b.append("\n\tclass: ");
           b.append(diag.get("class"));
-          if (diag.get("segString") != null) {
-            b.append("#ss=");
-            b.append(diag.get("segString"));
-          }
+        }
+        if (diag.get("wrapping") != null) {
+          b.append("\n\twrapping: ");
+          b.append(diag.get("wrapping"));
+        }
+        if (diag.get("segString") != null) {
+          b.append("\n\tsegString: ");
+          b.append(diag.get("segString").replaceAll("\n", "\n\t| "));
         }
       }
       return b.toString();
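
For illustration, the reworked segString output now reads roughly as follows (segment name and diagnostics values invented; the class/wrapping/segString lines appear only when the corresponding diagnostics keys are set):

    * _4a:
            source: merge
            class: AddDVMergePolicy
            wrapping: ...
            segString: ...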
@@ -273,6 +276,7 @@ public class AddDocValuesMergePolicyFactory extends WrapperMergePolicyFactory {
      */
 
     private String shouldRewrite(SegmentCommitInfo info) {
+      String rewriteReason = null;
       // Need to get a reader for this segment
       try (SegmentReader reader = new SegmentReader(info, Version.LUCENE_8_0_0.major, IOContext.DEFAULT)) {
         // check the marker, if defined
@@ -284,27 +288,24 @@ public class AddDocValuesMergePolicyFactory extends WrapperMergePolicyFactory {
 //        }
         StringBuilder sb = new StringBuilder();
         for (FieldInfo fi : reader.getFieldInfos()) {
-          if (fi.getDocValuesType() != DocValuesType.NONE) {
-            Map<String, Object> dvStats = UninvertingReader.getDVStats(reader, fi);
-            if (!((Integer)dvStats.get("numDocs")).equals((Integer)dvStats.get("present"))) {
-              throw new RuntimeException("segment: " + info.toString() + " " + fi.name + ", dvStats: " + dvStats + " diag: " + info.info.getDiagnostics());
-            }
-          }
           if (mapping.apply(fi) != null) {
             if (sb.length() > 0) {
               sb.append(',');
             }
             sb.append(fi.name);
+            sb.append(UninvertingReader.getDVStats(reader, fi).toString());
           }
         }
 //        return sb.toString();
-        return sb.length() > 0 ? sb.toString() : null;
+        rewriteReason = sb.length() > 0 ? sb.toString() : null;
       } catch (IOException e) {
         // It's safer to rewrite the segment if there's an error, although it may lead to a lot of work.
         log.warn("Error opening a reader for segment {}, will rewrite segment", info.toString());
         count("shouldRewriteError");
-        return "error " + e.getMessage();
+        rewriteReason = "error " + e.getMessage();
       }
+      if (rewriteReason == null) rewriteReason = "forced";
+      return rewriteReason;
     }
 
     @Override
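
Net effect of this hunk: shouldRewrite() now always returns a non-null reason. When the mapping matches, the reason is a comma-separated list of field names, each followed by its DV stats map (hypothetically, something like "price{numDocs=100, expected=100, present=87, nullOrZero=3, delPresent=0}"); on an IOException it is "error <message>"; otherwise it falls back to "forced".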
@@ -409,7 +410,9 @@ public class AddDocValuesMergePolicyFactory extends WrapperMergePolicyFactory {
     @Override
     public void setMergeInfo(SegmentCommitInfo info) {
       super.setMergeInfo(info);
-      info.info.getDiagnostics().put(DIAGNOSTICS_MARKER_PROP, marker);
+      if (marker != null) {
+        info.info.getDiagnostics().put(DIAGNOSTICS_MARKER_PROP, marker);
+      }
       info.info.getDiagnostics().put("class", getClass().getSimpleName());
       info.info.getDiagnostics().put("segString", AddDVMergePolicy.segString(this));
       if (metaPairs != null && metaPairs.length > 1) {
@@ -418,7 +421,9 @@ public class AddDocValuesMergePolicyFactory extends WrapperMergePolicyFactory {
           len--;
         }
         for (int i = 0; i < len; i += 2) {
-          info.info.getDiagnostics().put(metaPairs[i], metaPairs[i + 1]);
+          if (metaPairs[i] != null && metaPairs[i + 1] != null) {
+            info.info.getDiagnostics().put(metaPairs[i], metaPairs[i + 1]);
+          }
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a7b307/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java b/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
index e804635..5b07e61 100644
--- a/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
+++ b/solr/core/src/java/org/apache/solr/uninverting/UninvertingReader.java
@@ -18,15 +18,19 @@ package org.apache.solr.uninverting;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.function.Function;
 
+import org.apache.lucene.codecs.DocValuesProducer;
 import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.BinaryDocValues;
+import org.apache.lucene.index.CodecReader;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.FieldInfo;
@@ -37,7 +41,11 @@ import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedDocValues;
+import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.solr.uninverting.FieldCache.CacheEntry;
 
@@ -234,52 +242,7 @@ public class UninvertingReader extends FilterLeafReader {
     // Calculate a new FieldInfos that has DocValuesType where we didn't before
     ArrayList<FieldInfo> newFieldInfos = new ArrayList<>(in.getFieldInfos().size());
     for (FieldInfo fi : in.getFieldInfos()) {
-      DocValuesType type = fi.getDocValuesType();
-      // fields which currently don't have docValues, but are uninvertable (indexed or points data present)
-      if (type == DocValuesType.NONE &&
-          (fi.getIndexOptions() != IndexOptions.NONE || (fi.getPointNumBytes() > 0 && fi.getPointDataDimensionCount() == 1))) {
-        Type t = mapping.apply(fi.name); // could definitely return null, thus still can't uninvert it
-        if (t != null) {
-          if (t == Type.INTEGER_POINT || t == Type.LONG_POINT || t == Type.FLOAT_POINT || t == Type.DOUBLE_POINT) {
-            // type uses points
-            if (fi.getPointDataDimensionCount() == 0) {
-              continue;
-            }
-          } else {
-            // type uses inverted index
-            if (fi.getIndexOptions() == IndexOptions.NONE) {
-              continue;
-            }
-          }
-          switch(t) {
-            case INTEGER_POINT:
-            case LONG_POINT:
-            case FLOAT_POINT:
-            case DOUBLE_POINT:
-            case LEGACY_INTEGER:
-            case LEGACY_LONG:
-            case LEGACY_FLOAT:
-            case LEGACY_DOUBLE:
-              type = DocValuesType.NUMERIC;
-              break;
-            case BINARY:
-              type = DocValuesType.BINARY;
-              break;
-            case SORTED:
-              type = DocValuesType.SORTED;
-              break;
-            case SORTED_SET_BINARY:
-            case SORTED_SET_INTEGER:
-            case SORTED_SET_FLOAT:
-            case SORTED_SET_LONG:
-            case SORTED_SET_DOUBLE:
-              type = DocValuesType.SORTED_SET;
-              break;
-            default:
-              throw new AssertionError();
-          }
-        }
-      }
+      DocValuesType type = shouldWrap(fi, mapping);
       if (type != fi.getDocValuesType()) { // we changed it
         wrap = true;
         newFieldInfos.add(new FieldInfo(fi.name, fi.number, fi.hasVectors(), fi.omitsNorms(),
@@ -297,6 +260,56 @@ public class UninvertingReader extends FilterLeafReader {
     }
   }
 
+  public static DocValuesType shouldWrap(FieldInfo fi, Function<String, Type> mapping) {
+    DocValuesType type = fi.getDocValuesType();
+    // fields which currently don't have docValues, but are uninvertable (indexed or points data present)
+    if (type == DocValuesType.NONE &&
+        (fi.getIndexOptions() != IndexOptions.NONE || (fi.getPointNumBytes() > 0 && fi.getPointDataDimensionCount() == 1))) {
+      Type t = mapping.apply(fi.name); // could definitely return null, thus still can't uninvert it
+      if (t != null) {
+        if (t == Type.INTEGER_POINT || t == Type.LONG_POINT || t == Type.FLOAT_POINT || t == Type.DOUBLE_POINT) {
+          // type uses points
+          if (fi.getPointDataDimensionCount() == 0) {
+            return DocValuesType.NONE; // no points data to uninvert; report the existing type so the field is left unchanged
+          }
+        } else {
+          // type uses inverted index
+          if (fi.getIndexOptions() == IndexOptions.NONE) {
+            return DocValuesType.NONE; // no inverted index to uninvert; report the existing type so the field is left unchanged
+          }
+        }
+        switch(t) {
+          case INTEGER_POINT:
+          case LONG_POINT:
+          case FLOAT_POINT:
+          case DOUBLE_POINT:
+          case LEGACY_INTEGER:
+          case LEGACY_LONG:
+          case LEGACY_FLOAT:
+          case LEGACY_DOUBLE:
+            type = DocValuesType.NUMERIC;
+            break;
+          case BINARY:
+            type = DocValuesType.BINARY;
+            break;
+          case SORTED:
+            type = DocValuesType.SORTED;
+            break;
+          case SORTED_SET_BINARY:
+          case SORTED_SET_INTEGER:
+          case SORTED_SET_FLOAT:
+          case SORTED_SET_LONG:
+          case SORTED_SET_DOUBLE:
+            type = DocValuesType.SORTED_SET;
+            break;
+          default:
+            throw new AssertionError();
+        }
+      }
+    }
+    return type;
+  }
+
   final Function<String, Type> mapping;
   final FieldInfos fieldInfos;
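
A hedged sketch of the newly extracted shouldWrap() used standalone -- the reader is assumed to exist, and the mapping below is invented:

    // Which fields would UninvertingReader synthesize docValues for?
    Function<String, UninvertingReader.Type> mapping =
        name -> "popularity".equals(name) ? UninvertingReader.Type.INTEGER_POINT : null;
    for (FieldInfo fi : reader.getFieldInfos()) {
      DocValuesType t = UninvertingReader.shouldWrap(fi, mapping);
      if (t != fi.getDocValuesType()) {
        System.out.println(fi.name + " would be uninverted to " + t);
      }
    }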
 
@@ -445,6 +458,108 @@ public class UninvertingReader extends FilterLeafReader {
     return new FieldCacheStats(totalSize, info);
   }
 
+  public static Map<String, Object> getDVStats(CodecReader reader, FieldInfo fi) throws IOException {
+    DocValuesType type = fi.getDocValuesType();
+    try {
+      int present = 0;
+      int zeroOrNull = 0;
+      Bits liveDocs = reader.getLiveDocs();
+      DocValuesProducer producer = reader.getDocValuesReader();
+      int expected = reader.numDocs();
+      int deletedButPresent = 0;
+      switch (type) {
+        case NUMERIC:
+          NumericDocValues ndv = reader.getNumericDocValues(fi.name);
+          while (ndv.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+            if (liveDocs != null && !liveDocs.get(ndv.docID())) {
+              deletedButPresent++;
+            }
+            long num = ndv.longValue();
+            if (num == 0) {
+              zeroOrNull++;
+            }
+            present++;
+          }
+          break;
+        case BINARY:
+          BinaryDocValues bdv = reader.getBinaryDocValues(fi.name);
+          while (bdv.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+            if (liveDocs != null && !liveDocs.get(bdv.docID())) {
+              deletedButPresent++;
+            }
+            BytesRef bytes = bdv.binaryValue();
+            if (bytes == null || bytes.length == 0) {
+              zeroOrNull++;
+            }
+            present++;
+          }
+          break;
+        case SORTED:
+          SortedDocValues sdv = reader.getSortedDocValues(fi.name);
+          while (sdv.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+            if (liveDocs != null && !liveDocs.get(sdv.docID())) {
+              deletedButPresent++;
+            }
+            BytesRef bytes = sdv.binaryValue();
+            if (bytes == null || bytes.length == 0) {
+              zeroOrNull++;
+            }
+            present++;
+          }
+          break;
+        case SORTED_NUMERIC:
+          SortedNumericDocValues sndv = reader.getSortedNumericDocValues(fi.name);
+          while (sndv.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+            if (liveDocs != null && !liveDocs.get(sndv.docID())) {
+              deletedButPresent++;
+            }
+            if (sndv.docValueCount() > 0) {
+              for (int j = 0; j < sndv.docValueCount(); j++) {
+                sndv.nextValue(); // values must be consumed to advance, but only presence is counted
+              }
+              present++;
+            } else {
+              zeroOrNull++;
+            }
+          }
+          break;
+        case SORTED_SET:
+          SortedSetDocValues ssdv = reader.getSortedSetDocValues(fi.name);
+          while (ssdv.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+            if (liveDocs != null && !liveDocs.get(ssdv.docID())) {
+              deletedButPresent++;
+            }
+            if (ssdv.getValueCount() > 0) { // NOTE: getValueCount() is the field's global unique-term count, not per-document
+              long ord;
+              boolean allPresent = true;
+              while ((ord = ssdv.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
+                BytesRef term = ssdv.lookupOrd(ord);
+                if (term == null || term.length == 0) {
+                  allPresent = false;
+                }
+              }
+              if (!allPresent) {
+                zeroOrNull++;
+              }
+              present++;
+            } else {
+              zeroOrNull++;
+            }
+          }
+          break;
+      }
+      Map<String, Object> result = new HashMap<>();
+      result.put("numDocs", reader.numDocs());
+      result.put("expected", expected);
+      result.put("present", present);
+      result.put("nullOrZero", zeroOrNull);
+      result.put("delPresent", deletedButPresent);
+      return result;
+    } catch (IOException e) {
+      return Collections.singletonMap("error", e.getMessage());
+    }
+  }
+
   public static int getUninvertedStatsSize() {
     return FieldCache.DEFAULT.getCacheEntries().length;
   }
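
And a hedged sketch of the new getDVStats() helper in use -- the CodecReader is assumed to exist, and the enclosing method must declare throws IOException:

    // Dump per-field docValues stats; the keys match the map built above.
    for (FieldInfo fi : reader.getFieldInfos()) {
      if (fi.getDocValuesType() != DocValuesType.NONE) {
        Map<String, Object> stats = UninvertingReader.getDVStats(reader, fi);
        System.out.println(fi.name + " -> " + stats);
        // e.g. {numDocs=42, expected=42, present=42, nullOrZero=0, delPresent=0}
      }
    }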

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a7b307/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
index c95ae85..895fa29 100644
--- a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
@@ -239,7 +239,7 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
     indexr(id, 16, "SubjectTerms_mfacet", new String[]  {"test 1", "test 2", "test3"});
     String[] vals = new String[100];
     for (int i=0; i<100; i++) {
-      vals[i] = "test " + i;
+      vals[i] = "code/test " + i;
     }
     indexr(id, 17, "SubjectTerms_mfacet", vals);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a7b307/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java b/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java
index d416e13..1d830ca 100644
--- a/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java
@@ -52,7 +52,7 @@ public class WrapperMergePolicyFactoryTest extends SolrTestCaseJ4 {
     final double testMaxMergedSegmentMB = defaultTMP.getMaxMergedSegmentMB() * 10;
 
     final MergePolicyFactoryArgs args = new MergePolicyFactoryArgs();
-    args.add(WrapperMergePolicyFactory.WRAPPED_PREFIX, "test");
+    args.add(WrapperMergePolicyFactory.WRAPPED_PREFIX, "code/test");
     args.add("test.class", TieredMergePolicyFactory.class.getName());
     args.add("test.maxMergeAtOnce", testMaxMergeAtOnce);
     args.add("test.maxMergedSegmentMB", testMaxMergedSegmentMB);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5a7b307/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
index 7ff4226..cebd4c7 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
@@ -1235,7 +1235,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
         "test3"});
     String[] vals = new String[100];
     for (int i = 0; i < 100; i++) {
-      vals[i] = "test " + i;
+      vals[i] = "code/test " + i;
     }
     indexr(id, 17, "SubjectTerms_mfacet", vals);
 


[4/4] lucene-solr:jira/solr-12259: Merge branch 'master' into jira/solr-12259

Posted by ab...@apache.org.
Merge branch 'master' into jira/solr-12259


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/010ca9e5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/010ca9e5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/010ca9e5

Branch: refs/heads/jira/solr-12259
Commit: 010ca9e548fd4fde6cf69b96511c33ff2928e288
Parents: d2c8e96 f547938
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Tue Dec 18 00:28:04 2018 +0100
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Tue Dec 18 00:28:04 2018 +0100

----------------------------------------------------------------------
 dev-tools/doap/lucene.rdf                       |    7 +
 dev-tools/doap/solr.rdf                         |    7 +
 lucene/CHANGES.txt                              |   47 +-
 lucene/MIGRATE.txt                              |    6 +
 .../miscellaneous/TestEmptyTokenStream.java     |    2 +-
 .../lucene/analysis/ko/dict/UserDictionary.java |    6 +-
 .../index/TestBackwardsCompatibility.java       |   42 +-
 .../org/apache/lucene/index/index.7.6.0-cfs.zip |  Bin 0 -> 15655 bytes
 .../apache/lucene/index/index.7.6.0-nocfs.zip   |  Bin 0 -> 15649 bytes
 .../lucene/index/sorted-invalid.7.5.0.zip       |  Bin 0 -> 908443 bytes
 .../org/apache/lucene/index/sorted.7.6.0.zip    |  Bin 0 -> 74541 bytes
 .../lucene/codecs/lucene70/IndexedDISI.java     |  114 +-
 .../codecs/lucene70/IndexedDISICache.java       |  330 --
 .../lucene70/IndexedDISICacheFactory.java       |  260 --
 .../lucene/codecs/lucene70/LongCompressor.java  |  251 --
 .../lucene70/Lucene70DocValuesProducer.java     |  250 +-
 .../codecs/lucene70/Lucene70NormsProducer.java  |    8 +-
 .../java/org/apache/lucene/document/Field.java  |   14 +-
 .../org/apache/lucene/document/StoredField.java |   13 +
 .../apache/lucene/index/BufferedUpdates.java    |    6 +-
 .../lucene/index/DocValuesFieldUpdates.java     |   75 +-
 .../apache/lucene/index/FieldUpdatesBuffer.java |   21 +
 .../lucene/index/FreqProxTermsWriter.java       |   41 +-
 .../lucene/index/FrozenBufferedUpdates.java     |  216 +-
 .../org/apache/lucene/index/IndexWriter.java    |   65 +-
 .../org/apache/lucene/index/IndexableField.java |    7 +
 .../org/apache/lucene/index/MultiSorter.java    |   30 +-
 .../index/NumericDocValuesFieldUpdates.java     |   31 +-
 .../apache/lucene/search/MatchAllDocsQuery.java |    4 +-
 .../org/apache/lucene/util/BytesRefArray.java   |    3 +-
 .../apache/lucene/util/BytesRefIterator.java    |   10 +-
 .../java/org/apache/lucene/util/Constants.java  |    1 +
 .../java/org/apache/lucene/util/RankBitSet.java |  302 --
 .../util/packed/AbstractPagedMutable.java       |    3 +-
 .../lucene/codecs/lucene70/TestIndexedDISI.java |  125 +-
 .../codecs/lucene70/TestLongCompressor.java     |   87 -
 .../perfield/TestPerFieldPostingsFormat2.java   |   12 +-
 .../org/apache/lucene/index/TestAddIndexes.java |   62 +-
 .../index/TestConcurrentMergeScheduler.java     |    2 +-
 .../test/org/apache/lucene/index/TestCrash.java |    2 +-
 .../apache/lucene/index/TestDeletionPolicy.java |    8 +-
 .../org/apache/lucene/index/TestDocValues.java  |  109 +-
 .../lucene/index/TestDocValuesFieldUpdates.java |   59 +-
 .../lucene/index/TestFieldUpdatesBuffer.java    |   24 +-
 .../index/TestFlushByRamOrCountsPolicy.java     |   16 +-
 .../lucene/index/TestFrozenBufferedUpdates.java |  104 +
 .../lucene/index/TestIndexManyDocuments.java    |    2 +-
 .../apache/lucene/index/TestIndexSorting.java   | 1740 ++++----
 .../apache/lucene/index/TestIndexWriter.java    |   45 +-
 .../lucene/index/TestIndexWriterCommit.java     |    2 +-
 .../lucene/index/TestIndexWriterDelete.java     |    2 +-
 .../lucene/index/TestIndexWriterFromReader.java |   20 +-
 .../lucene/index/TestIndexWriterMaxDocs.java    |    6 +-
 .../index/TestIndexWriterMergePolicy.java       |    2 +-
 .../lucene/index/TestIndexWriterMerging.java    |    8 +-
 .../lucene/index/TestIndexWriterReader.java     |    4 +-
 .../org/apache/lucene/index/TestIsCurrent.java  |    8 +-
 .../apache/lucene/index/TestRollingUpdates.java |    2 +-
 .../TestSoftDeletesRetentionMergePolicy.java    |   20 +-
 .../lucene/index/TestThreadedForceMerge.java    |    4 +-
 .../lucene/index/TestTieredMergePolicy.java     |   31 +-
 .../org/apache/lucene/search/TestBoolean2.java  |    2 +-
 .../lucene/search/TestMatchAllDocsQuery.java    |   23 +
 .../lucene/search/TestSearcherManager.java      |    2 +-
 .../lucene/store/TestByteBuffersDirectory.java  |    2 +-
 .../apache/lucene/store/TestRAMDirectory.java   |    2 +-
 .../org/apache/lucene/util/TestRankBitSet.java  |  107 -
 .../directory/DirectoryTaxonomyWriter.java      |    4 +-
 lucene/ivy-versions.properties                  |   18 +-
 lucene/licenses/hamcrest-core-1.3.jar.sha1      |    1 +
 lucene/licenses/hamcrest-core-LICENSE-BSD.txt   |   27 +
 lucene/licenses/hamcrest-core-NOTICE.txt        |    0
 lucene/licenses/junit-4.10.jar.sha1             |    1 -
 lucene/licenses/junit-4.12.jar.sha1             |    1 +
 .../randomizedtesting-runner-2.6.4.jar.sha1     |    1 -
 .../randomizedtesting-runner-2.7.2.jar.sha1     |    1 +
 .../lucene/index/memory/TestMemoryIndex.java    |    2 +-
 .../lucene/queries/mlt/TestMoreLikeThis.java    |    2 +-
 .../document/LatLonShapeBoundingBoxQuery.java   |  269 +-
 .../java/org/apache/lucene/geo/Rectangle2D.java |  315 ++
 .../intervals/DifferenceIntervalFunction.java   |   10 +
 .../intervals/DisjunctionIntervalsSource.java   |   15 +
 .../intervals/FilteredIntervalsSource.java      |  103 +
 .../lucene/search/intervals/IntervalFilter.java |    5 +
 .../search/intervals/IntervalFunction.java      |   54 +-
 .../search/intervals/IntervalIterator.java      |   11 +
 .../search/intervals/IntervalMatches.java       |    9 +
 .../intervals/IntervalMatchesIterator.java      |   38 +
 .../lucene/search/intervals/Intervals.java      |   23 +-
 .../intervals/LowpassIntervalsSource.java       |   90 -
 .../MinimizingConjunctionIntervalsSource.java   |    7 +-
 .../search/intervals/TermIntervalsSource.java   |    5 +
 .../lucene/search/intervals/package-info.java   |    2 +
 .../org/apache/lucene/geo/TestRectangle2D.java  |  100 +
 .../search/intervals/TestIntervalQuery.java     |   10 +
 .../lucene/search/intervals/TestIntervals.java  |   61 +-
 .../spatial3d/geom/GeoComplexPolygon.java       |    2 +-
 .../apache/lucene/spatial3d/geom/GeoPoint.java  |   34 +-
 lucene/test-framework/ivy.xml                   |    1 +
 .../apache/lucene/index/RandomIndexWriter.java  |    8 +-
 .../ThreadedIndexingAndSearchingTestCase.java   |    2 +-
 lucene/tools/junit4/solr-tests.policy           |   26 +-
 solr/CHANGES.txt                                |   39 +-
 solr/NOTICE.txt                                 |    6 +
 solr/bin/solr                                   |    9 +-
 .../dataimport/TestHierarchicalDocBuilder.java  |    1 +
 .../solr/ltr/feature/TestExternalFeatures.java  |    4 +-
 .../model/TestMultipleAdditiveTreesModel.java   |    2 +-
 .../solr/client/solrj/embedded/JettyConfig.java |   15 +-
 .../client/solrj/embedded/JettySolrRunner.java  |   50 +-
 .../solr/client/solrj/embedded/SSLConfig.java   |  166 -
 .../solr/cloud/OverseerTaskProcessor.java       |    2 +-
 .../cloud/autoscaling/NodeAddedTrigger.java     |    3 +-
 .../solr/cloud/autoscaling/NodeLostTrigger.java |    5 +-
 .../org/apache/solr/core/CoreContainer.java     |   57 +-
 .../src/java/org/apache/solr/core/SolrCore.java |   98 +-
 .../java/org/apache/solr/core/SolrInfoBean.java |    2 +-
 .../handler/component/HttpShardHandler.java     |   26 +-
 .../component/HttpShardHandlerFactory.java      |  144 +-
 .../handler/component/ShardHandlerFactory.java  |   10 +-
 .../solr/response/BinaryResponseWriter.java     |  121 +-
 .../org/apache/solr/response/DocsStreamer.java  |    6 +-
 .../org/apache/solr/response/ResultContext.java |    2 +
 .../java/org/apache/solr/schema/FieldType.java  |    7 +
 .../org/apache/solr/schema/IndexSchema.java     |   10 +-
 .../java/org/apache/solr/schema/StrField.java   |    6 +
 .../java/org/apache/solr/schema/TextField.java  |    5 +
 .../apache/solr/search/SolrDocumentFetcher.java |   24 +-
 .../similarities/BM25SimilarityFactory.java     |   11 +-
 .../LegacyBM25SimilarityFactory.java            |   64 +
 .../similarities/SchemaSimilarityFactory.java   |   12 +-
 .../solr/security/AuthenticationPlugin.java     |  129 +-
 .../apache/solr/security/BasicAuthPlugin.java   |  213 +-
 .../ConfigurableInternodeAuthHadoopPlugin.java  |    6 +
 .../security/DelegationTokenKerberosFilter.java |    5 +
 .../apache/solr/security/HadoopAuthFilter.java  |    5 +
 .../apache/solr/security/HadoopAuthPlugin.java  |   35 +-
 .../solr/security/HttpClientBuilderPlugin.java  |    5 +
 .../apache/solr/security/KerberosFilter.java    |    8 +-
 .../apache/solr/security/KerberosPlugin.java    |    6 +
 .../solr/security/PKIAuthenticationPlugin.java  |   69 +-
 .../security/Sha256AuthenticationProvider.java  |    7 +-
 .../org/apache/solr/servlet/HttpSolrCall.java   |    1 +
 .../apache/solr/servlet/SolrDispatchFilter.java |   45 +-
 .../apache/solr/update/SolrCmdDistributor.java  |   50 +-
 .../solr/update/StreamingSolrClients.java       |   78 +-
 .../java/org/apache/solr/update/UpdateLog.java  |    2 +-
 .../apache/solr/update/UpdateShardHandler.java  |   92 +-
 .../processor/DistributedUpdateProcessor.java   |   38 +-
 .../processor/TolerantUpdateProcessor.java      |   26 +-
 .../src/java/org/apache/solr/util/TimeOut.java  |    2 +-
 .../stats/InstrumentedHttpListenerFactory.java  |  114 +
 solr/core/src/test-files/log4j2.xml             |    1 +
 .../solr/collection1/conf/schema-bm25.xml       |   17 +
 solr/core/src/test-files/solr/solr.xml          |    2 +-
 .../cloud/AssignBackwardCompatibilityTest.java  |    5 +-
 .../solr/cloud/BasicDistributedZk2Test.java     |   12 +-
 .../org/apache/solr/cloud/CloudTestUtils.java   |    4 +-
 .../solr/cloud/CollectionsAPISolrJTest.java     |    5 +-
 .../apache/solr/cloud/DistributedQueueTest.java |   12 +
 .../solr/cloud/DocValuesNotIndexedTest.java     |   29 +-
 .../apache/solr/cloud/HttpPartitionTest.java    |    2 +-
 .../apache/solr/cloud/KerberosTestServices.java |    3 +-
 .../apache/solr/cloud/MoveReplicaHDFSTest.java  |    5 +
 .../apache/solr/cloud/MultiThreadedOCPTest.java |   11 +-
 ...verseerCollectionConfigSetProcessorTest.java |    4 +-
 .../apache/solr/cloud/RemoteQueryErrorTest.java |    2 +-
 .../org/apache/solr/cloud/SSLMigrationTest.java |    2 +-
 .../solr/cloud/SaslZkACLProviderTest.java       |    3 +-
 .../apache/solr/cloud/TestCloudConsistency.java |    6 +-
 .../solr/cloud/TestMiniSolrCloudClusterSSL.java |   20 +-
 .../apache/solr/cloud/TestSSLRandomization.java |    2 +
 .../cloud/TestSolrCloudWithKerberosAlt.java     |    3 +
 .../CollectionsAPIAsyncDistributedZkTest.java   |    2 +-
 .../CollectionsAPIDistributedZkTest.java        |   16 +-
 .../api/collections/CustomCollectionTest.java   |    2 +
 .../HdfsCollectionsAPIDistributedZkTest.java    |    5 +
 .../AutoAddReplicasPlanActionTest.java          |    2 +-
 .../autoscaling/AutoScalingHandlerTest.java     |    1 +
 .../autoscaling/ExecutePlanActionTest.java      |    9 +-
 .../HdfsAutoAddReplicasIntegrationTest.java     |    4 +
 .../MetricTriggerIntegrationTest.java           |   10 +-
 .../NodeAddedTriggerIntegrationTest.java        |    2 +-
 .../cloud/autoscaling/NodeAddedTriggerTest.java |   18 +-
 .../NodeLostTriggerIntegrationTest.java         |    2 +-
 .../cloud/autoscaling/NodeLostTriggerTest.java  |   18 +-
 .../NodeMarkersRegistrationTest.java            |    3 +
 .../autoscaling/RestoreTriggerStateTest.java    |    3 +-
 .../SearchRateTriggerIntegrationTest.java       |    3 +-
 .../autoscaling/SystemLogListenerTest.java      |   46 +-
 .../TriggerCooldownIntegrationTest.java         |    9 +-
 .../autoscaling/TriggerIntegrationTest.java     |   10 +-
 .../TriggerSetPropertiesIntegrationTest.java    |  250 +-
 .../cloud/autoscaling/sim/SimCloudManager.java  |    3 +-
 .../sim/SimClusterStateProvider.java            |  183 +-
 .../autoscaling/sim/SimDistribStateManager.java |   87 +-
 .../autoscaling/sim/SimSolrCloudTestCase.java   |   39 -
 .../sim/TestSimComputePlanAction.java           |   10 +-
 .../sim/TestSimExecutePlanAction.java           |   20 +-
 .../autoscaling/sim/TestSimExtremeIndexing.java |   20 +-
 .../sim/TestSimGenericDistributedQueue.java     |    1 -
 .../autoscaling/sim/TestSimLargeCluster.java    |   45 +-
 .../sim/TestSimNodeAddedTrigger.java            |   16 +-
 .../autoscaling/sim/TestSimNodeLostTrigger.java |   17 +-
 .../autoscaling/sim/TestSimPolicyCloud.java     |   22 +-
 .../sim/TestSimTriggerIntegration.java          |   59 +-
 .../solr/cloud/cdcr/CdcrBootstrapTest.java      |    2 +-
 .../org/apache/solr/core/TestConfigSets.java    |    2 +-
 .../org/apache/solr/core/TestCoreDiscovery.java |    2 +-
 .../solr/core/TestQuerySenderListener.java      |    8 +-
 .../test/org/apache/solr/core/TestSolrXml.java  |    2 +-
 .../solr/handler/TestReplicationHandler.java    |   33 +-
 .../org/apache/solr/handler/TestSQLHandler.java | 4045 +++++++++---------
 .../handler/component/InfixSuggestersTest.java  |    2 +-
 .../component/TestHttpShardHandlerFactory.java  |    4 +-
 .../metrics/rrd/SolrRrdBackendFactoryTest.java  |   14 +-
 .../solr/response/TestBinaryResponseWriter.java |   45 +-
 .../solr/rest/schema/TestBulkSchemaAPI.java     |    6 +-
 .../search/TestPayloadScoreQParserPlugin.java   |    2 +-
 .../solr/search/function/TestFunctionQuery.java |   10 +-
 .../similarities/TestBM25SimilarityFactory.java |    8 +-
 .../TestLegacyBM25SimilarityFactory.java        |   45 +
 .../TestNonDefinedSimilarityFactory.java        |   28 +-
 .../similarities/TestPerFieldSimilarity.java    |    8 +-
 .../solr/security/BasicAuthIntegrationTest.java |  141 +-
 .../solr/security/BasicAuthStandaloneTest.java  |    4 +-
 .../HttpParamDelegationTokenPlugin.java         |   64 +-
 .../PKIAuthenticationIntegrationTest.java       |   18 +-
 .../security/TestPKIAuthenticationPlugin.java   |   13 +-
 .../TestSolrCloudWithHadoopAuthPlugin.java      |    8 +-
 .../solr/update/MockingHttp2SolrClient.java     |  156 +
 .../org/apache/solr/update/PeerSyncTest.java    |    2 +-
 .../update/TestInPlaceUpdatesStandalone.java    |    2 +-
 .../org/apache/solr/update/UpdateLogTest.java   |    2 +-
 .../ClassificationUpdateProcessorTest.java      |    2 +-
 .../SSLCredentialProviderFactoryTest.java       |   27 +-
 solr/licenses/hamcrest-core-1.3.jar.sha1        |    1 +
 solr/licenses/hamcrest-core-LICENSE-BSD.txt     |   27 +
 solr/licenses/hamcrest-core-NOTICE.txt          |    0
 .../http2-client-9.4.14.v20181114.jar.sha1      |    1 +
 solr/licenses/http2-client-LICENSE-ASL.txt      |  202 +
 solr/licenses/http2-client-NOTICE.txt           |  111 +
 .../http2-common-9.4.14.v20181114.jar.sha1      |    1 +
 solr/licenses/http2-common-LICENSE-ASL.txt      |  202 +
 solr/licenses/http2-common-NOTICE.txt           |  111 +
 .../http2-hpack-9.4.14.v20181114.jar.sha1       |    1 +
 solr/licenses/http2-hpack-LICENSE-ASL.txt       |  202 +
 solr/licenses/http2-hpack-NOTICE.txt            |  111 +
 ...p-client-transport-9.4.14.v20181114.jar.sha1 |    1 +
 .../http2-http-client-transport-LICENSE-ASL.txt |  202 +
 .../http2-http-client-transport-NOTICE.txt      |  111 +
 .../http2-server-9.4.14.v20181114.jar.sha1      |    1 +
 solr/licenses/http2-server-LICENSE-ASL.txt      |  202 +
 solr/licenses/http2-server-NOTICE.txt           |  111 +
 .../jetty-alpn-client-9.4.14.v20181114.jar.sha1 |    1 +
 ...y-alpn-java-client-9.4.14.v20181114.jar.sha1 |    1 +
 ...y-alpn-java-server-9.4.14.v20181114.jar.sha1 |    1 +
 .../jetty-alpn-server-9.4.14.v20181114.jar.sha1 |    1 +
 .../jetty-client-9.4.14.v20181114.jar.sha1      |    1 +
 solr/licenses/junit-4.10.jar.sha1               |    1 -
 solr/licenses/junit-4.12.jar.sha1               |    1 +
 solr/licenses/junit4-ant-2.6.4.jar.sha1         |    1 -
 solr/licenses/junit4-ant-2.7.2.jar.sha1         |    1 +
 .../randomizedtesting-runner-2.6.4.jar.sha1     |    1 -
 .../randomizedtesting-runner-2.7.2.jar.sha1     |    1 +
 solr/licenses/zookeeper-3.4.11.jar.sha1         |    1 -
 solr/licenses/zookeeper-3.4.13.jar.sha1         |    1 +
 solr/server/etc/jetty-http.xml                  |    5 +
 solr/server/etc/jetty-https.xml                 |   26 +-
 solr/server/etc/jetty-https8.xml                |   69 +
 solr/server/ivy.xml                             |    6 +
 solr/server/modules/https8.mod                  |    9 +
 ...uthentication-and-authorization-plugins.adoc |   18 +-
 .../src/basic-authentication-plugin.adoc        |   47 +-
 .../src/distributed-requests.adoc               |    3 -
 solr/solr-ref-guide/src/format-of-solr-xml.adoc |    3 -
 .../src/major-changes-in-solr-8.adoc            |   43 +
 .../src/other-schema-elements.adoc              |    4 +-
 solr/solr-ref-guide/src/using-solrj.adoc        |   10 +-
 solr/solrj/ivy.xml                              |   12 +
 .../apache/solr/client/solrj/SolrRequest.java   |   22 +
 .../solr/client/solrj/embedded/SSLConfig.java   |  166 +
 .../solr/client/solrj/embedded/package.html     |   22 +
 .../impl/ConcurrentUpdateHttp2SolrClient.java   |  690 +++
 .../solr/client/solrj/impl/Http2SolrClient.java |  979 +++++
 .../solrj/impl/HttpClientBuilderFactory.java    |    3 +
 .../client/solrj/impl/HttpListenerFactory.java  |   38 +
 .../solr/client/solrj/impl/HttpSolrClient.java  |   13 +-
 .../solrj/impl/Krb5HttpClientBuilder.java       |   60 +-
 .../client/solrj/impl/LBHttp2SolrClient.java    |   69 +
 .../client/solrj/impl/LBHttpSolrClient.java     |  710 +--
 .../solr/client/solrj/impl/LBSolrClient.java    |  703 +++
 ...PreemptiveBasicAuthClientBuilderFactory.java |   42 +-
 .../client/solrj/io/stream/CloudSolrStream.java |   21 +-
 .../client/solrj/io/stream/FacetStream.java     |   14 +-
 .../client/solrj/io/stream/RandomStream.java    |    3 +-
 .../solrj/io/stream/SearchFacadeStream.java     |    6 +
 .../client/solrj/io/stream/SearchStream.java    |    9 +-
 .../solrj/io/stream/SignificantTermsStream.java |    3 +-
 .../client/solrj/io/stream/StatsStream.java     |    5 +-
 .../solrj/io/stream/TimeSeriesStream.java       |   14 +-
 .../solr/client/solrj/util/Constants.java       |   41 +
 .../solrj/util/SolrBasicAuthentication.java     |   61 +
 .../solr/common/ConditionalMapWriter.java       |    8 +-
 .../apache/solr/common/LinkedHashMapWriter.java |    2 +-
 .../java/org/apache/solr/common/MapWriter.java  |    5 +
 .../org/apache/solr/common/MapWriterMap.java    |    2 +-
 .../org/apache/solr/common/SolrDocument.java    |   18 +-
 .../apache/solr/common/SolrDocumentBase.java    |    2 +-
 .../apache/solr/common/SolrInputDocument.java   |    8 +-
 .../common/util/ByteArrayUtf8CharSequence.java  |   19 +-
 .../solr/common/util/FastOutputStream.java      |    2 +-
 .../apache/solr/common/util/JavaBinCodec.java   |   25 +-
 .../org/apache/solr/common/util/NamedList.java  |    2 +-
 .../solr/common/util/Utf8CharSequence.java      |    2 +-
 solr/solrj/src/test-files/log4j2.xml            |   40 +
 .../solrj/SolrExampleBinaryHttp2Test.java       |   58 +
 .../solr/client/solrj/SolrExampleTests.java     |   10 +-
 .../client/solrj/TestLBHttp2SolrClient.java     |  335 ++
 .../solrj/embedded/SolrExampleJettyTest.java    |   65 +
 .../SolrExampleStreamingBinaryHttp2Test.java    |  103 +
 .../embedded/SolrExampleStreamingHttp2Test.java |  139 +
 .../solrj/embedded/SolrExampleXMLHttp2Test.java |   45 +
 .../solrj/impl/BasicHttpSolrClientTest.java     |    2 +-
 .../solrj/impl/CloudSolrClientBadInputTest.java |    2 +-
 ...urrentUpdateHttp2SolrClientBadInputTest.java |   99 +
 ...pdateHttp2SolrClientMultiCollectionTest.java |   95 +
 .../ConcurrentUpdateHttp2SolrClientTest.java    |  233 +
 .../ConcurrentUpdateSolrClientBadInputTest.java |    4 +-
 .../impl/ConcurrentUpdateSolrClientTest.java    |    9 +-
 .../impl/Http2SolrClientCompatibilityTest.java  |  114 +
 .../client/solrj/impl/Http2SolrClientTest.java  |  598 +++
 .../solrj/impl/HttpSolrClientBadInputTest.java  |    4 +-
 .../impl/HttpSolrClientSSLAuthConPoolTest.java  |    4 +
 .../impl/LBHttpSolrClientBadInputTest.java      |    4 +-
 .../solrj/io/stream/StreamExpressionTest.java   |  174 +
 .../solrj/request/json/DomainMapTest.java       |    2 +-
 .../solrj/request/json/HeatmapFacetMapTest.java |    2 +-
 .../request/json/JsonQueryRequestUnitTest.java  |    2 +-
 .../solrj/request/json/QueryFacetMapTest.java   |    2 +-
 .../solrj/request/json/RangeFacetMapTest.java   |    2 +-
 .../solrj/request/json/TermsFacetMapTest.java   |    2 +-
 solr/test-framework/ivy.xml                     |    5 +-
 .../solr/BaseDistributedSearchTestCase.java     |   17 +-
 .../java/org/apache/solr/SolrJettyTestBase.java |   16 +-
 .../java/org/apache/solr/SolrTestCaseHS.java    |    2 +-
 .../java/org/apache/solr/SolrTestCaseJ4.java    |   11 +-
 .../cloud/AbstractFullDistribZkTestBase.java    |   28 +-
 .../java/org/apache/solr/cloud/ChaosMonkey.java |   41 +-
 .../apache/solr/cloud/MiniSolrCloudCluster.java |   19 +-
 .../solr/cloud/SolrCloudAuthTestCase.java       |  132 +
 .../org/apache/solr/cloud/ZkTestServer.java     |    8 +
 .../component/TrackingShardHandlerFactory.java  |   60 +-
 .../org/apache/solr/util/SSLTestConfig.java     |  146 +-
 solr/webapp/web/WEB-INF/web.xml                 |    2 +-
 solr/webapp/web/css/angular/login.css           |  103 +
 solr/webapp/web/css/angular/menu.css            |    2 +
 solr/webapp/web/index.html                      |  160 +-
 solr/webapp/web/js/angular/app.js               |   47 +-
 solr/webapp/web/js/angular/controllers/login.js |  146 +
 solr/webapp/web/js/angular/controllers/query.js |    3 +-
 solr/webapp/web/js/angular/services.js          |   26 +-
 solr/webapp/web/libs/angular-utf8-base64.js     |  217 +
 solr/webapp/web/libs/angular-utf8-base64.min.js |   45 +
 solr/webapp/web/partials/login.html             |   80 +
 365 files changed, 15337 insertions(+), 7492 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/010ca9e5/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
----------------------------------------------------------------------