Posted to commits@lucene.apache.org by cp...@apache.org on 2017/04/25 13:59:28 UTC

[01/17] lucene-solr:jira/solr-8668: SOLR-10499: facet.heatmap DocSet to Bits optimizations

Repository: lucene-solr
Updated Branches:
  refs/heads/jira/solr-8668 796d5d19e -> 4d1c775e2


SOLR-10499: facet.heatmap DocSet to Bits optimizations


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/99119621
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/99119621
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/99119621

Branch: refs/heads/jira/solr-8668
Commit: 991196216d2a0ba92ce8a772dd3a9a03eb29b241
Parents: fb8fd77
Author: David Smiley <ds...@apache.org>
Authored: Thu Apr 20 17:46:28 2017 -0400
Committer: David Smiley <ds...@apache.org>
Committed: Thu Apr 20 17:46:28 2017 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  3 ++
 .../handler/component/SpatialHeatmapFacets.java | 41 ++++++++++----------
 2 files changed, 24 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99119621/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 3da5dd4..c1bede4 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -184,6 +184,9 @@ Optimizations
   instance if it already is modifiable, otherwise creates a new ModifiableSolrParams instance.
  (Jörg Rathlev via Koji)
 
+* SOLR-10499: facet.heatmap is now significantly faster when the docset (base query) matches everything and there are no
+  deleted docs.  It's also faster when the docset matches a small fraction of the index or none. (David Smiley)
+
 Bug Fixes
 ----------------------
 * SOLR-10281: ADMIN_PATHS is duplicated in two places and inconsistent. This can cause automatic

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99119621/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java b/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java
index 4ad882c..9bca5c7 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SpatialHeatmapFacets.java
@@ -32,6 +32,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.lucene.util.FixedBitSet;
+import org.apache.solr.search.DocIterator;
+import org.apache.solr.search.SolrIndexSearcher;
 import org.locationtech.spatial4j.context.SpatialContext;
 import org.locationtech.spatial4j.shape.Shape;
 import org.apache.lucene.spatial.prefix.HeatmapFacetCounter;
@@ -134,32 +137,13 @@ public class SpatialHeatmapFacets {
       gridLevel = strategy.getGrid().getLevelForDistance(distErr);
     }
 
-    // Turn docSet into Bits
-    Bits topAcceptDocs;
-    if (docSet instanceof BitDocSet) {
-      BitDocSet set = (BitDocSet) docSet;
-      topAcceptDocs = set.getBits();
-    } else {
-      topAcceptDocs = new Bits() {
-        @Override
-        public boolean get(int index) {
-          return docSet.exists(index);
-        }
-
-        @Override
-        public int length() {
-          return rb.req.getSearcher().maxDoc();
-        }
-      };
-    }
-
     //Compute!
     final HeatmapFacetCounter.Heatmap heatmap;
     try {
       heatmap = HeatmapFacetCounter.calcFacets(
           strategy,
           rb.req.getSearcher().getTopReaderContext(),
-          topAcceptDocs,
+          getTopAcceptDocs(docSet, rb.req.getSearcher()), // turn DocSet into Bits
           boundsShape,
           gridLevel,
           params.getFieldInt(fieldKey, FacetParams.FACET_HEATMAP_MAX_CELLS, 100_000) // will throw if exceeded
@@ -190,6 +174,23 @@ public class SpatialHeatmapFacets {
     return result;
   }
 
+  private static Bits getTopAcceptDocs(DocSet docSet, SolrIndexSearcher searcher) throws IOException {
+    if (searcher.getLiveDocs() == docSet) {
+      return null; // means match everything (all live docs). This can speedup things a lot.
+    } else if (docSet.size() == 0) {
+      return new Bits.MatchNoBits(searcher.maxDoc()); // can speedup things a lot
+    } else if (docSet instanceof BitDocSet) {
+      return ((BitDocSet) docSet).getBits();
+    } else {
+      // TODO DocSetBase.calcBits ought to be at DocSet level?
+      FixedBitSet bits = new FixedBitSet(searcher.maxDoc());
+      for (DocIterator iter = docSet.iterator(); iter.hasNext();) {
+        bits.set(iter.nextDoc());
+      }
+      return bits;
+    }
+  }
+
   private static void formatCountsAndAddToNL(String fieldKey, ResponseBuilder rb, SolrParams params,
                                              int columns, int rows, int[] counts, NamedList<Object> result) {
     final String format = params.getFieldParam(fieldKey, FacetParams.FACET_HEATMAP_FORMAT, FORMAT_INTS2D);
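
For context on the optimization above, here is a minimal, hedged SolrJ sketch of a request that would take the new fast path: a *:* base query over an index with no deleted docs makes the DocSet identical to the searcher's liveDocs, so getTopAcceptDocs returns null and no per-doc Bits filtering happens. The collection name and RPT field name are hypothetical placeholders, not part of this commit:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class HeatmapFastPathSketch {
  public static void main(String[] args) throws Exception {
    // "spatial" core and "geo_srpt" field are hypothetical placeholders.
    try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/spatial").build()) {
      SolrQuery q = new SolrQuery("*:*");      // matches all live docs -> null Bits (accept everything)
      q.setRows(0);                            // only the facet is of interest
      q.setFacet(true);
      q.set("facet.heatmap", "geo_srpt");      // heatmap faceting over the spatial RPT field
      QueryResponse rsp = client.query(q);
      System.out.println(rsp.getResponse().findRecursive("facet_counts", "facet_heatmaps", "geo_srpt"));
    }
  }
}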


[14/17] lucene-solr:jira/solr-8668: SOLR-10559: Add let and get Streaming Expressions

Posted by cp...@apache.org.
SOLR-10559: Add let and get Streaming Expressions


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/cd02dd7d
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/cd02dd7d
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/cd02dd7d

Branch: refs/heads/jira/solr-8668
Commit: cd02dd7d4afcbd1dbbe20b1e5c700d4b7c8a8302
Parents: 71ce0d3
Author: Joel Bernstein <jb...@apache.org>
Authored: Mon Apr 24 17:27:37 2017 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Mon Apr 24 20:19:17 2017 -0400

----------------------------------------------------------------------
 .../org/apache/solr/handler/StreamHandler.java  |   3 +-
 .../solr/client/solrj/io/stream/CellStream.java |   4 +
 .../solr/client/solrj/io/stream/GetStream.java  | 117 ++++++++++++++
 .../solr/client/solrj/io/stream/LetStream.java  | 152 +++++++++++++++++++
 .../client/solrj/io/stream/StreamContext.java   |   7 +
 .../solrj/io/stream/StreamExpressionTest.java   |  76 ++++++++++
 6 files changed, 358 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
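
Before the per-file diffs, a hedged SolrJ sketch of what the new let/get expressions do: cell() materializes a stream's tuples under a name, let() binds those tuples in the StreamContext, and get() replays them later in the expression. The collection name and URL are hypothetical; the pattern mirrors the new StreamExpressionTest case further below.

import org.apache.solr.client.solrj.io.Tuple;
import org.apache.solr.client.solrj.io.stream.SolrStream;
import org.apache.solr.client.solrj.io.stream.StreamContext;
import org.apache.solr.client.solrj.io.stream.TupleStream;
import org.apache.solr.common.params.ModifiableSolrParams;

public class LetGetSketch {
  public static void main(String[] args) throws Exception {
    // Bind the search results under the name "results", then replay them with get(results).
    String expr = "let(cell(results, search(mycoll, q=\"*:*\", fl=\"id\", sort=\"id desc\")), get(results))";

    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("expr", expr);
    params.set("qt", "/stream");

    TupleStream stream = new SolrStream("http://localhost:8983/solr/mycoll", params);
    stream.setStreamContext(new StreamContext());
    try {
      stream.open();
      for (Tuple t = stream.read(); !t.EOF; t = stream.read()) {
        System.out.println(t.getString("id"));
      }
    } finally {
      stream.close();
    }
  }
}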


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cd02dd7d/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index 155933d..a1f7993 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -163,7 +163,8 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
       .withFunctionName("echo", EchoStream.class)
       .withFunctionName("cell", CellStream.class)
       .withFunctionName("list", ListStream.class)
-
+      .withFunctionName("let", LetStream.class)
+      .withFunctionName("get", GetStream.class)
       // metrics
          .withFunctionName("min", MinMetric.class)
       .withFunctionName("max", MaxMetric.class)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cd02dd7d/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java
index aad99f6..fd33737 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java
@@ -56,6 +56,10 @@ public class CellStream extends TupleStream implements Expressible {
     init(name, tupleStream);
   }
 
+  public String getName() {
+    return this.name;
+  }
+
   private void init(String name, TupleStream tupleStream) {
     this.name = name;
     this.stream = tupleStream;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cd02dd7d/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/GetStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/GetStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/GetStream.java
new file mode 100644
index 0000000..5a89f0f
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/GetStream.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.comp.StreamComparator;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
+import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class GetStream extends TupleStream implements Expressible {
+
+  private static final long serialVersionUID = 1;
+
+  private StreamContext streamContext;
+  private String name;
+  private Iterator<Tuple> tupleIterator;
+
+  public GetStream(String name) throws IOException {
+    init(name);
+  }
+
+  public GetStream(StreamExpression expression, StreamFactory factory) throws IOException {
+    String name = factory.getValueOperand(expression, 0);
+    init(name);
+  }
+
+  private void init(String name) {
+    this.name = name;
+  }
+
+  @Override
+  public StreamExpression toExpression(StreamFactory factory) throws IOException{
+    return toExpression(factory, true);
+  }
+
+  private StreamExpression toExpression(StreamFactory factory, boolean includeStreams) throws IOException {
+    // function name
+    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+    expression.addParameter(name);
+    return expression;
+  }
+
+  @Override
+  public Explanation toExplanation(StreamFactory factory) throws IOException {
+
+    StreamExplanation explanation = new StreamExplanation(getStreamNodeId().toString());
+    explanation.setFunctionName(factory.getFunctionName(this.getClass()));
+    explanation.setImplementingClass(this.getClass().getName());
+    explanation.setExpressionType(ExpressionType.STREAM_SOURCE);
+    explanation.setExpression(toExpression(factory, false).toString());
+    return explanation;
+  }
+
+  public void setStreamContext(StreamContext context) {
+    this.streamContext = context;
+  }
+
+  public List<TupleStream> children() {
+    List<TupleStream> l =  new ArrayList();
+    return l;
+  }
+
+  public Tuple read() throws IOException {
+    Map map = new HashMap();
+    if(tupleIterator.hasNext()) {
+      Tuple t = tupleIterator.next();
+      map.putAll(t.fields);
+      return new Tuple(map);
+    } else {
+      map.put("EOF", true);
+      return new Tuple(map);
+    }
+  }
+
+  public void close() throws IOException {
+  }
+
+  public void open() throws IOException {
+    Map<String, List<Tuple>> lets = streamContext.getLets();
+    List<Tuple> tuples = lets.get(name);
+    tupleIterator = tuples.iterator();
+  }
+
+  /** Return the stream sort - ie, the order in which records are returned */
+  public StreamComparator getStreamSort(){
+    return null;
+  }
+
+  public int getCost() {
+    return 0;
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cd02dd7d/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java
new file mode 100644
index 0000000..3a17211
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/LetStream.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.comp.StreamComparator;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
+import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class LetStream extends TupleStream implements Expressible {
+
+  private static final long serialVersionUID = 1;
+  private TupleStream stream;
+  private List<CellStream> cellStreams;
+  private StreamContext streamContext;
+
+  public LetStream(TupleStream stream, List<CellStream> cellStreams) throws IOException {
+    init(stream, cellStreams);
+  }
+
+  public LetStream(StreamExpression expression, StreamFactory factory) throws IOException {
+    List<StreamExpression> streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class);
+
+    if(streamExpressions.size() < 2){
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting atleast 2 streams but found %d",expression, streamExpressions.size()));
+    }
+
+    TupleStream stream = null;
+    List<CellStream> cellStreams = new ArrayList();
+
+    for(StreamExpression streamExpression : streamExpressions) {
+      TupleStream s = factory.constructStream(streamExpression);
+      if(s instanceof CellStream) {
+        cellStreams.add((CellStream)s);
+      } else {
+        if(stream == null) {
+          stream = s;
+        } else {
+          throw new IOException("Found more then one stream that was not a CellStream");
+        }
+      }
+    }
+
+    init(stream, cellStreams);
+  }
+
+  private void init(TupleStream _stream, List<CellStream> _cellStreams) {
+    this.stream = _stream;
+    this.cellStreams = _cellStreams;
+  }
+
+  @Override
+  public StreamExpression toExpression(StreamFactory factory) throws IOException{
+    return toExpression(factory, true);
+  }
+
+  private StreamExpression toExpression(StreamFactory factory, boolean includeStreams) throws IOException {
+    // function name
+    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+    expression.addParameter(((Expressible) stream).toExpression(factory));
+    for(CellStream cellStream : cellStreams) {
+      expression.addParameter(((Expressible)cellStream).toExpression(factory));
+    }
+
+    return expression;
+  }
+
+  @Override
+  public Explanation toExplanation(StreamFactory factory) throws IOException {
+
+    StreamExplanation explanation = new StreamExplanation(getStreamNodeId().toString());
+    explanation.setFunctionName(factory.getFunctionName(this.getClass()));
+    explanation.setImplementingClass(this.getClass().getName());
+    explanation.setExpressionType(ExpressionType.STREAM_DECORATOR);
+    explanation.setExpression(toExpression(factory, false).toString());
+    explanation.addChild(stream.toExplanation(factory));
+
+    return explanation;
+  }
+
+  public void setStreamContext(StreamContext context) {
+    this.streamContext = context;
+    this.stream.setStreamContext(context);
+  }
+
+  public List<TupleStream> children() {
+    List<TupleStream> l =  new ArrayList<TupleStream>();
+    l.add(stream);
+
+    return l;
+  }
+
+  public Tuple read() throws IOException {
+    return stream.read();
+  }
+
+  public void close() throws IOException {
+    stream.close();
+  }
+
+  public void open() throws IOException {
+    Map<String, List<Tuple>> lets = streamContext.getLets();
+    for(CellStream cellStream : cellStreams) {
+      try {
+        cellStream.setStreamContext(streamContext);
+        cellStream.open();
+        Tuple tup = cellStream.read();
+        String name = cellStream.getName();
+        List<Tuple> tuples = (List<Tuple>)tup.get(name);
+        lets.put(name, tuples);
+      } finally {
+        cellStream.close();
+      }
+    }
+    stream.open();
+  }
+
+  /** Return the stream sort - ie, the order in which records are returned */
+  public StreamComparator getStreamSort(){
+    return null;
+  }
+
+  public int getCost() {
+    return 0;
+  }
+
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cd02dd7d/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java
index 60a9274..5dcc7b3 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StreamContext.java
@@ -19,8 +19,10 @@ package org.apache.solr.client.solrj.io.stream;
 import java.io.Serializable;
 import java.util.Map;
 import java.util.HashMap;
+import java.util.List;
 
 import org.apache.solr.client.solrj.io.ModelCache;
+import org.apache.solr.client.solrj.io.Tuple;
 import org.apache.solr.client.solrj.io.SolrClientCache;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 
@@ -37,12 +39,17 @@ public class StreamContext implements Serializable{
 
   private Map entries = new HashMap();
   private Map tupleContext = new HashMap();
+  private Map<String, List<Tuple>> lets = new HashMap();
   public int workerID;
   public int numWorkers;
   private SolrClientCache clientCache;
   private ModelCache modelCache;
   private StreamFactory streamFactory;
 
+  public Map<String, List<Tuple>> getLets(){
+    return lets;
+  }
+
   public Object get(Object key) {
     return entries.get(key);
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cd02dd7d/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index bb771b6..51c5301 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -5100,6 +5100,82 @@ public class StreamExpressionTest extends SolrCloudTestCase {
   }
 
   @Test
+  public void testLetGetStream() throws Exception {
+    UpdateRequest updateRequest = new UpdateRequest();
+    updateRequest.add(id, "hello", "test_t", "l b c d c e");
+    updateRequest.add(id, "hello1", "test_t", "l b c d c");
+
+    updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+
+    String expr = "search("+COLLECTIONORALIAS+", q=\"*:*\", fl=\"id,test_t\", sort=\"id desc\")";
+    String cat = "let(cell(results,"+expr+"), get(results))";
+    ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", cat);
+    paramsLoc.set("qt", "/stream");
+
+    String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS;
+    TupleStream solrStream = new SolrStream(url, paramsLoc);
+
+    StreamContext context = new StreamContext();
+    solrStream.setStreamContext(context);
+    List<Tuple> tuples = getTuples(solrStream);
+    assertTrue(tuples.size() == 2);
+    assertTrue(tuples.get(0).get("id").equals("hello1"));
+    assertTrue(tuples.get(0).get("test_t").equals("l b c d c"));
+    assertTrue(tuples.get(1).get("id").equals("hello"));
+    assertTrue(tuples.get(1).get("test_t").equals("l b c d c e"));
+
+
+    //Test there are no side effects when transforming tuples.
+    expr = "search("+COLLECTIONORALIAS+", q=\"*:*\", fl=\"id,test_t\", sort=\"id desc\")";
+    cat = "let(cell(results,"+expr+"), list(select(get(results), id as newid, test_t), get(results)))";
+    paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", cat);
+    paramsLoc.set("qt", "/stream");
+
+    solrStream = new SolrStream(url, paramsLoc);
+
+    context = new StreamContext();
+    solrStream.setStreamContext(context);
+    tuples = getTuples(solrStream);
+    assertTrue(tuples.size() == 4);
+    assertTrue(tuples.get(0).get("newid").equals("hello1"));
+    assertTrue(tuples.get(0).get("test_t").equals("l b c d c"));
+    assertTrue(tuples.get(1).get("newid").equals("hello"));
+    assertTrue(tuples.get(1).get("test_t").equals("l b c d c e"));
+    assertTrue(tuples.get(2).get("id").equals("hello1"));
+    assertTrue(tuples.get(2).get("test_t").equals("l b c d c"));
+    assertTrue(tuples.get(3).get("id").equals("hello"));
+    assertTrue(tuples.get(3).get("test_t").equals("l b c d c e"));
+
+    //Test multiple lets
+
+    //Test there are no side effects when transforming tuples.
+    expr = "search("+COLLECTIONORALIAS+", q=\"*:*\", fl=\"id,test_t\", sort=\"id desc\")";
+    String expr1 = "search("+COLLECTIONORALIAS+", q=\"*:*\", fl=\"id,test_t\", sort=\"id asc\")";
+
+    cat = "let(cell(results,"+expr+"), cell(results1,"+expr1+"), list(select(get(results), id as newid, test_t), get(results1)))";
+    paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", cat);
+    paramsLoc.set("qt", "/stream");
+
+    solrStream = new SolrStream(url, paramsLoc);
+
+    context = new StreamContext();
+    solrStream.setStreamContext(context);
+    tuples = getTuples(solrStream);
+    assertTrue(tuples.size() == 4);
+    assertTrue(tuples.get(0).get("newid").equals("hello1"));
+    assertTrue(tuples.get(0).get("test_t").equals("l b c d c"));
+    assertTrue(tuples.get(1).get("newid").equals("hello"));
+    assertTrue(tuples.get(1).get("test_t").equals("l b c d c e"));
+    assertTrue(tuples.get(2).get("id").equals("hello"));
+    assertTrue(tuples.get(2).get("test_t").equals("l b c d c e"));
+    assertTrue(tuples.get(3).get("id").equals("hello1"));
+    assertTrue(tuples.get(3).get("test_t").equals("l b c d c"));
+  }
+
+  @Test
   public void testConvertEvaluator() throws Exception {
 
     UpdateRequest updateRequest = new UpdateRequest();


[15/17] lucene-solr:jira/solr-8668: SOLR-10520: fix child.facet.field counts

Posted by cp...@apache.org.
SOLR-10520: fix child.facet.field counts


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/680f4d7f
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/680f4d7f
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/680f4d7f

Branch: refs/heads/jira/solr-8668
Commit: 680f4d7fd378868254786107de92a894758f667c
Parents: cd02dd7
Author: Mikhail Khludnev <mk...@apache.org>
Authored: Tue Apr 25 12:27:18 2017 +0300
Committer: Mikhail Khludnev <mk...@apache.org>
Committed: Tue Apr 25 12:33:05 2017 +0300

----------------------------------------------------------------------
 solr/CHANGES.txt                                        |  1 +
 .../search/join/BlockJoinFacetComponentSupport.java     | 12 +++++++-----
 .../solr/search/join/BlockJoinFacetDistribTest.java     | 10 ++++++++--
 3 files changed, 16 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/680f4d7f/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 19fc3f8..417eed6 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -241,6 +241,7 @@ Bug Fixes
   when there was a mincount > 1.  This has been corrected by changing numBuckets cardinality processing to
   ignore mincount > 1 for non-distributed requests. (yonik)
 
+* SOLR-10520: child.facet.field doubled counts at least when rows>0. (Dr. Oleg Savrasov via Mikhail Khludnev) 
 
 Other Changes
 ----------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/680f4d7f/solr/core/src/java/org/apache/solr/search/join/BlockJoinFacetComponentSupport.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/join/BlockJoinFacetComponentSupport.java b/solr/core/src/java/org/apache/solr/search/join/BlockJoinFacetComponentSupport.java
index 85aa799..560e441 100644
--- a/solr/core/src/java/org/apache/solr/search/join/BlockJoinFacetComponentSupport.java
+++ b/solr/core/src/java/org/apache/solr/search/join/BlockJoinFacetComponentSupport.java
@@ -124,11 +124,13 @@ abstract class BlockJoinFacetComponentSupport extends SearchComponent {
 
   @Override
   public void handleResponses(ResponseBuilder rb, ShardRequest sreq) {
-    NamedList collectedChildFacetFields = getChildFacetFields(rb.rsp.getValues(), true);
-    List<ShardResponse> responses = sreq.responses;
-    for (ShardResponse shardResponse : responses) {
-      NamedList shardChildFacetFields = getChildFacetFields(shardResponse.getSolrResponse().getResponse(), false);
-      mergeFacets(collectedChildFacetFields, shardChildFacetFields);
+    if ((sreq.purpose & ShardRequest.PURPOSE_GET_TOP_IDS) != 0) {
+      NamedList collectedChildFacetFields = getChildFacetFields(rb.rsp.getValues(), true);
+      List<ShardResponse> responses = sreq.responses;
+      for (ShardResponse shardResponse : responses) {
+        NamedList shardChildFacetFields = getChildFacetFields(shardResponse.getSolrResponse().getResponse(), false);
+        mergeFacets(collectedChildFacetFields, shardChildFacetFields);
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/680f4d7f/solr/core/src/test/org/apache/solr/search/join/BlockJoinFacetDistribTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/join/BlockJoinFacetDistribTest.java b/solr/core/src/test/org/apache/solr/search/join/BlockJoinFacetDistribTest.java
index 2bd30fa..1b12657 100644
--- a/solr/core/src/test/org/apache/solr/search/join/BlockJoinFacetDistribTest.java
+++ b/solr/core/src/test/org/apache/solr/search/join/BlockJoinFacetDistribTest.java
@@ -130,12 +130,18 @@ public class BlockJoinFacetDistribTest extends SolrCloudTestCase{
 
     // to parent query
     final String childQueryClause = "COLOR_s:("+(matchingColors.toString().replaceAll("[,\\[\\]]", " "))+")";
+      final boolean oldFacetsEnabled = random().nextBoolean();
       QueryResponse results = query("q", "{!parent which=\"type_s:parent\"}"+childQueryClause,
-          "facet", random().nextBoolean() ? "true":"false",
+          "facet", oldFacetsEnabled ? "true":"false", // try to enforce multiple phases
+              oldFacetsEnabled ? "facet.field" : "ignore" , "BRAND_s",
+              oldFacetsEnabled&&usually() ? "facet.limit" : "ignore" , "1",
+              oldFacetsEnabled&&usually() ? "facet.mincount" : "ignore" , "2",
+              oldFacetsEnabled&&usually() ? "facet.overrequest.count" : "ignore" , "0",
           "qt",  random().nextBoolean() ? "blockJoinDocSetFacetRH" : "blockJoinFacetRH",
           "child.facet.field", "COLOR_s",
           "child.facet.field", "SIZE_s",
-          "rows","0" // we care only abt results 
+          "distrib.singlePass", random().nextBoolean() ? "true":"false",
+          "rows", random().nextBoolean() ? "0":"10"
           );
       NamedList<Object> resultsResponse = results.getResponse();
       assertNotNull(resultsResponse);
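
For reference, a hedged SolrJ sketch of the kind of request the fix above targets: a distributed block-join facet query with rows > 0, where shard responses from more than one request phase were previously merged and the child.facet.field counts came out doubled. The collection, request handler, and field names are hypothetical placeholders taken loosely from the test:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class ChildFacetFieldSketch {
  public static void main(String[] args) throws Exception {
    try (CloudSolrClient client = new CloudSolrClient.Builder().withZkHost("localhost:9983").build()) {
      client.setDefaultCollection("products");                    // hypothetical collection
      SolrQuery q = new SolrQuery("{!parent which=\"type_s:parent\"}COLOR_s:Blue");
      q.setRows(10);                                              // rows > 0 used to double the counts
      q.set("qt", "blockJoinFacetRH");                            // a handler wired with the block-join facet component
      q.set("child.facet.field", "COLOR_s", "SIZE_s");
      QueryResponse rsp = client.query(q);
      System.out.println(rsp.getResponse());                      // child facet counts are in the response
    }
  }
}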


[02/17] lucene-solr:jira/solr-8668: LUCENE-7791: add tests with index sorting and sparse docvalues fields

Posted by cp...@apache.org.
LUCENE-7791: add tests with index sorting and sparse docvalues fields


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/0404e373
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/0404e373
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/0404e373

Branch: refs/heads/jira/solr-8668
Commit: 0404e37357b90b583d306074838d69c7074ce307
Parents: 9911962
Author: Jim Ferenczi <ji...@apache.org>
Authored: Fri Apr 21 04:41:24 2017 +0200
Committer: Jim Ferenczi <ji...@apache.org>
Committed: Fri Apr 21 04:41:24 2017 +0200

----------------------------------------------------------------------
 .../apache/lucene/index/TestIndexSorting.java   | 76 ++++++++++++++++++++
 1 file changed, 76 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0404e373/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
index 4ef580b..ef5d4ef 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
@@ -2403,4 +2403,80 @@ public class TestIndexSorting extends LuceneTestCase {
     }
     IOUtils.close(r, w, dir);
   }
+
+  public void testIndexSortWithSparseField() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+    SortField sortField = new SortField("dense_int", SortField.Type.INT, true);
+    Sort indexSort = new Sort(sortField);
+    iwc.setIndexSort(indexSort);
+    IndexWriter w = new IndexWriter(dir, iwc);
+    for (int i = 0; i < 128; i++) {
+      Document doc = new Document();
+      doc.add(new NumericDocValuesField("dense_int", i));
+      if (i < 64) {
+        doc.add(new NumericDocValuesField("sparse_int", i));
+        doc.add(new BinaryDocValuesField("sparse_binary", new BytesRef(Integer.toString(i))));
+      }
+      w.addDocument(doc);
+    }
+    w.commit();
+    w.forceMerge(1);
+    DirectoryReader r = DirectoryReader.open(w);
+    assertEquals(1, r.leaves().size());
+    LeafReader leafReader = r.leaves().get(0).reader();
+
+    NumericDocValues denseValues = leafReader.getNumericDocValues("dense_int");
+    NumericDocValues sparseValues = leafReader.getNumericDocValues("sparse_int");
+    BinaryDocValues sparseBinaryValues = leafReader.getBinaryDocValues("sparse_binary");
+    for(int docID = 0; docID < 128; docID++) {
+      assertTrue(denseValues.advanceExact(docID));
+      assertEquals(127-docID, (int) denseValues.longValue());
+      if (docID >= 64) {
+        assertTrue(denseValues.advanceExact(docID));
+        assertTrue(sparseValues.advanceExact(docID));
+        assertTrue(sparseBinaryValues.advanceExact(docID));
+        assertEquals(docID, sparseValues.docID());
+        assertEquals(127-docID, (int) sparseValues.longValue());
+        assertEquals(new BytesRef(Integer.toString(127-docID)), sparseBinaryValues.binaryValue());
+      } else {
+        assertFalse(sparseBinaryValues.advanceExact(docID));
+        assertFalse(sparseValues.advanceExact(docID));
+      }
+    }
+    IOUtils.close(r, w, dir);
+  }
+
+  public void testIndexSortOnSparseField() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
+    SortField sortField = new SortField("sparse", SortField.Type.INT, false);
+    sortField.setMissingValue(Integer.MIN_VALUE);
+    Sort indexSort = new Sort(sortField);
+    iwc.setIndexSort(indexSort);
+    IndexWriter w = new IndexWriter(dir, iwc);
+    for (int i = 0; i < 128; i++) {
+      Document doc = new Document();
+      if (i < 64) {
+        doc.add(new NumericDocValuesField("sparse", i));
+      }
+      w.addDocument(doc);
+    }
+    w.commit();
+    w.forceMerge(1);
+    DirectoryReader r = DirectoryReader.open(w);
+    assertEquals(1, r.leaves().size());
+    LeafReader leafReader = r.leaves().get(0).reader();
+    NumericDocValues sparseValues = leafReader.getNumericDocValues("sparse");
+    for(int docID = 0; docID < 128; docID++) {
+      if (docID >= 64) {
+        assertTrue(sparseValues.advanceExact(docID));
+        assertEquals(docID-64, (int) sparseValues.longValue());
+      } else {
+        assertFalse(sparseValues.advanceExact(docID));
+      }
+    }
+    IOUtils.close(r, w, dir);
+  }
+
 }


[09/17] lucene-solr:jira/solr-8668: SOLR-10489 Tentative fix for a test failure (Mikhail Khludnev via ab)

Posted by cp...@apache.org.
SOLR-10489 Tentative fix for a test failure (Mikhail Khludnev via ab)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/c09d82e5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/c09d82e5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/c09d82e5

Branch: refs/heads/jira/solr-8668
Commit: c09d82e5381a6b328c108c9b755986ddeb801c38
Parents: 487e085
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Mon Apr 24 16:12:02 2017 +0200
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Mon Apr 24 16:12:02 2017 +0200

----------------------------------------------------------------------
 .../apache/solr/handler/admin/StatsReloadRaceTest.java    | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/c09d82e5/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java b/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
index c455b69..9fee818 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/StatsReloadRaceTest.java
@@ -67,13 +67,14 @@ public class StatsReloadRaceTest extends SolrTestCaseJ4 {
       boolean isCompleted;
       do {
         if (random.nextBoolean()) {
-          requestMetrics();
+          requestMetrics(true);
         } else {
           requestCoreStatus();
         }
 
         isCompleted = checkReloadComlpetion(asyncId);
       } while (!isCompleted);
+      requestMetrics(false);
     }
   }
 
@@ -105,7 +106,7 @@ public class StatsReloadRaceTest extends SolrTestCaseJ4 {
     return isCompleted;
   }
 
-  private void requestMetrics() throws Exception {
+  private void requestMetrics(boolean softFail) throws Exception {
     SolrQueryResponse rsp = new SolrQueryResponse();
     String registry = "solr.core." + h.coreName;
     String key = "SEARCHER.searcher.indexVersion";
@@ -126,9 +127,12 @@ public class StatsReloadRaceTest extends SolrTestCaseJ4 {
         assertTrue(metrics.get(key) instanceof Long);
         break;
       } else {
-        Thread.sleep(1000);
+        Thread.sleep(500);
       }
     }
+    if (softFail && !found) {
+      return;
+    }
     assertTrue("Key " + key + " not found in registry " + registry, found);
   }
 


[17/17] lucene-solr:jira/solr-8668: SOLR-8668 working branch: TestHalfAndHalfDocValues tweak after merging in latest master

Posted by cp...@apache.org.
SOLR-8668 working branch: TestHalfAndHalfDocValues tweak after merging in latest master

also removes solrconfig-mergepolicy-nocfs.xml, which was forgotten in the previous commit


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4d1c775e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4d1c775e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4d1c775e

Branch: refs/heads/jira/solr-8668
Commit: 4d1c775e2e420bdb36b9706d6c0ae454c35b91b8
Parents: 7a2a47f
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Apr 25 14:44:16 2017 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Tue Apr 25 14:47:21 2017 +0100

----------------------------------------------------------------------
 .../conf/solrconfig-mergepolicy-nocfs.xml       | 34 --------------------
 .../solr/schema/TestHalfAndHalfDocValues.java   |  4 ---
 2 files changed, 38 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d1c775e/solr/core/src/test-files/solr/collection1/conf/solrconfig-mergepolicy-nocfs.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-mergepolicy-nocfs.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-mergepolicy-nocfs.xml
deleted file mode 100644
index b59cdc8..0000000
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-mergepolicy-nocfs.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" ?>
-
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<config>
-  <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
-  <directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.RAMDirectoryFactory}"/>
-  <schemaFactory class="ClassicIndexSchemaFactory"/>
-
-  <indexConfig>
-    <useCompoundFile>${testSetNoCFSMergePolicyConfig.useCompoundFile:false}</useCompoundFile>
-    <mergePolicy class="org.apache.lucene.index.TieredMergePolicy">
-      <double name="noCFSRatio">0.5</double>
-    </mergePolicy>
-  </indexConfig>
-
-  <requestHandler name="standard" class="solr.StandardRequestHandler"></requestHandler>
-
-</config>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4d1c775e/solr/core/src/test/org/apache/solr/schema/TestHalfAndHalfDocValues.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/TestHalfAndHalfDocValues.java b/solr/core/src/test/org/apache/solr/schema/TestHalfAndHalfDocValues.java
index b0c3956..feb9236 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestHalfAndHalfDocValues.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestHalfAndHalfDocValues.java
@@ -42,10 +42,6 @@ public class TestHalfAndHalfDocValues extends SolrTestCaseJ4 {
     // segments with and without docvalues
     systemSetPropertySolrTestsMergePolicyFactory(NoMergePolicyFactory.class.getName());
 
-    // HACK: Don't use a RandomMergePolicy, but only use the mergePolicyFactory that we've just set
-    System.setProperty(SYSTEM_PROPERTY_SOLR_TESTS_USEMERGEPOLICYFACTORY, "true");
-    System.setProperty(SYSTEM_PROPERTY_SOLR_TESTS_USEMERGEPOLICY, "false");
-
     initCore("solrconfig-basic.xml", "schema-docValues.xml");
 
     // sanity check our schema meets our expectations


[13/17] lucene-solr:jira/solr-8668: SOLR-10548: SOLR-10552: numBuckets should use hll and ignore mincount>1 filtering

Posted by cp...@apache.org.
SOLR-10548: SOLR-10552: numBuckets should use hll and ignore mincount>1 filtering


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/71ce0d31
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/71ce0d31
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/71ce0d31

Branch: refs/heads/jira/solr-8668
Commit: 71ce0d31a6a907bf1566fc51324d5f26e4205c21
Parents: 114a65b
Author: yonik <yo...@apache.org>
Authored: Mon Apr 24 18:17:17 2017 -0400
Committer: yonik <yo...@apache.org>
Committed: Mon Apr 24 18:17:17 2017 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt                                | 12 +++++++
 .../solr/search/facet/FacetFieldMerger.java     | 14 ++------
 .../solr/search/facet/FacetFieldProcessor.java  | 36 +++++++++++---------
 .../solr/search/facet/TestJsonFacets.java       |  8 ++---
 4 files changed, 38 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ce0d31/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 8fbebdb..19fc3f8 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -139,6 +139,10 @@ Upgrade Notes
 
 * Solr contribs map-reduce, morphlines-core and morphlines-cell have been removed.
 
+* JSON Facet API now uses hyper-log-log for numBuckets cardinality calculation and
+  calculates cardinality before filtering buckets by any mincount greater than 1.
+
+
 Detailed Change List
 ----------------------
 
@@ -192,6 +196,9 @@ Optimizations
 * SOLR-9217: Reduced heap consumption for filter({!join ... score=...}) 
   (Andrey Kudryavtsev, Gopikannan Venugopalsamy via Mikhail Khludnev)
 
+* SOLR-10548: JSON Facet API now uses hyper-log-log++ for determining the number of buckets
+  when merging requests from a multi-shard distributed request. (yonik)
+
 Bug Fixes
 ----------------------
 * SOLR-10281: ADMIN_PATHS is duplicated in two places and inconsistent. This can cause automatic
@@ -230,6 +237,11 @@ Bug Fixes
 
 * SOLR-10493: Investigate SolrCloudExampleTest failures. (Erick Erickson)
 
+* SOLR-10552: JSON Facet API numBuckets was not consistent between distributed and non-distributed requests
+  when there was a mincount > 1.  This has been corrected by changing numBuckets cardinality processing to
+  ignore mincount > 1 for non-distributed requests. (yonik)
+
+
 Other Changes
 ----------------------
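
As a reference for the behavior change noted in these CHANGES entries, a hedged SolrJ sketch of a JSON Facet request where numBuckets now reports the bucket cardinality before the mincount filter is applied (computed with HLL when shard responses are merged). The collection and field names are hypothetical:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class NumBucketsSketch {
  public static void main(String[] args) throws Exception {
    try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycoll").build()) {
      SolrQuery q = new SolrQuery("*:*");
      q.setRows(0);
      // numBuckets now counts every bucket with a nonzero count, even ones dropped by mincount:2.
      q.set("json.facet", "{ cats: { type: terms, field: cat_s, mincount: 2, numBuckets: true } }");
      QueryResponse rsp = client.query(q);
      System.out.println(rsp.getResponse().get("facets"));
    }
  }
}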
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ce0d31/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
index 9ec5d79..4f57bcd 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
@@ -82,7 +82,7 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
       Object nb = facetResult.get("numBuckets");
       if (nb != null) {
         if (numBuckets == null) {
-          numBuckets = new FacetNumBucketsMerger();
+          numBuckets = new HLLAgg("hll_merger").createFacetMerger(nb);
         }
         numBuckets.merge(nb , mcontext);
       }
@@ -98,17 +98,7 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
     SimpleOrderedMap result = new SimpleOrderedMap();
 
     if (numBuckets != null) {
-      int removed = 0;
-      if (freq.mincount > 1) {
-        for (FacetBucket bucket : buckets.values()) {
-          if (bucket.count < freq.mincount) removed++;
-        }
-      }
-      result.add("numBuckets", ((Number)numBuckets.getMergedResult()).longValue() - removed);
-
-      // TODO: we can further increase this estimate.
-      // If not sorting by count, use a simple ratio to scale
-      // If sorting by count desc, then add up the highest_possible_missing_count from each shard
+      result.add("numBuckets", ((Number)numBuckets.getMergedResult()).longValue());
     }
 
     sortBuckets();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ce0d31/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
index d4daf08..65b88d8 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
@@ -210,10 +210,6 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
                                         IntFunction<Comparable> bucketValFromSlotNumFunc,
                                         Function<Comparable, String> fieldQueryValFunc) throws IOException {
     int numBuckets = 0;
-    List<Object> bucketVals = null;
-    if (freq.numBuckets && fcontext.isShard()) {
-      bucketVals = new ArrayList<>(100);
-    }
 
     final int off = fcontext.isShard() ? 0 : (int) freq.offset;
 
@@ -257,16 +253,18 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
     Slot bottom = null;
     Slot scratchSlot = new Slot();
     for (int slotNum = 0; slotNum < numSlots; slotNum++) {
-      // screen out buckets not matching mincount immediately (i.e. don't even increment numBuckets)
-      if (effectiveMincount > 0 && countAcc.getCount(slotNum) < effectiveMincount) {
-        continue;
+
+      // screen out buckets not matching mincount
+      if (effectiveMincount > 0) {
+        int count = countAcc.getCount(slotNum);
+        if (count  < effectiveMincount) {
+          if (count > 0)
+            numBuckets++;  // Still increment numBuckets as long as we have some count.  This is for consistency between distrib and non-distrib mode.
+          continue;
+        }
       }
 
       numBuckets++;
-      if (bucketVals != null && bucketVals.size()<100) {
-        Object val = bucketValFromSlotNumFunc.apply(slotNum);
-        bucketVals.add(val);
-      }
 
       if (bottom != null) {
         scratchSlot.slot = slotNum; // scratchSlot is only used to hold this slotNum for the following line
@@ -292,10 +290,17 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
       if (!fcontext.isShard()) {
         res.add("numBuckets", numBuckets);
       } else {
-        SimpleOrderedMap<Object> map = new SimpleOrderedMap<>(2);
-        map.add("numBuckets", numBuckets);
-        map.add("vals", bucketVals);
-        res.add("numBuckets", map);
+        DocSet domain = fcontext.base;
+        if (freq.prefix != null) {
+          Query prefixFilter = sf.getType().getPrefixQuery(null, sf, freq.prefix);
+          domain = fcontext.searcher.getDocSet(prefixFilter, domain);
+        }
+
+        HLLAgg agg = new HLLAgg(freq.field);
+        SlotAcc acc = agg.createSlotAcc(fcontext, domain.size(), 1);
+        acc.collect(domain, 0);
+        acc.key = "numBuckets";
+        acc.setValues(res, 0);
       }
     }
 
@@ -522,7 +527,6 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
      "all",
      {"cat3":{"_l":["A"]}}]]},
    "cat1":{"_l":["A"]}}}
-
    */
 
   static <T> List<T> asList(Object list) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/71ce0d31/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index bad3de5..a8a1eaa 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -770,12 +770,12 @@ public class TestJsonFacets extends SolrTestCaseHS {
             "'f1':{ numBuckets:1, buckets:[{val:B, count:3}]} } "
     );
 
-    // mincount should lower numBuckets
+    // mincount should not lower numBuckets (since SOLR-10552)
     client.testJQ(params(p, "q", "*:*", "rows", "0", "facet", "true"
             , "json.facet", "{f1:{terms:{${terms} field:${cat_s}, numBuckets:true, mincount:3}}}"
         )
         , "facets=={ 'count':6, " +
-            "'f1':{ numBuckets:1, buckets:[{val:B, count:3}]} } "
+            "'f1':{ numBuckets:2, buckets:[{val:B, count:3}]} } "
     );
 
     // basic range facet
@@ -1136,7 +1136,7 @@ public class TestJsonFacets extends SolrTestCaseHS {
                 ",f3:{${terms}  type:field, field:${num_i}, sort:'index asc' }" +
                 ",f4:{${terms}  type:field, field:${num_i}, sort:'index desc' }" +
                 ",f5:{${terms}  type:field, field:${num_i}, sort:'index desc', limit:1, missing:true, allBuckets:true, numBuckets:true }" +
-                ",f6:{${terms}  type:field, field:${num_i}, sort:'index desc', mincount:2, numBuckets:true }" +   // mincount should lower numbuckets
+                ",f6:{${terms}  type:field, field:${num_i}, sort:'index desc', mincount:2, numBuckets:true }" +   // mincount should not lower numbuckets (since SOLR-10552)
                 ",f7:{${terms}  type:field, field:${num_i}, sort:'index desc', offset:2, numBuckets:true }" +     // test offset
                 ",f8:{${terms}  type:field, field:${num_i}, sort:'index desc', offset:100, numBuckets:true }" +   // test high offset
                 ",f9:{${terms}  type:field, field:${num_i}, sort:'x desc', facet:{x:'avg(${num_d})'}, missing:true, allBuckets:true, numBuckets:true }" +            // test stats
@@ -1150,7 +1150,7 @@ public class TestJsonFacets extends SolrTestCaseHS {
             ",f3:{ buckets:[{val:-5,count:2},{val:2,count:1},{val:3,count:1},{val:7,count:1} ] } " +
             ",f4:{ buckets:[{val:7,count:1},{val:3,count:1},{val:2,count:1},{val:-5,count:2} ] } " +
             ",f5:{ buckets:[{val:7,count:1}]   , numBuckets:4, allBuckets:{count:5}, missing:{count:1}  } " +
-            ",f6:{ buckets:[{val:-5,count:2}]  , numBuckets:1  } " +
+            ",f6:{ buckets:[{val:-5,count:2}]  , numBuckets:4  } " +
             ",f7:{ buckets:[{val:2,count:1},{val:-5,count:2}] , numBuckets:4 } " +
             ",f8:{ buckets:[] , numBuckets:4 } " +
             ",f9:{ buckets:[{val:7,count:1,x:11.0},{val:2,count:1,x:4.0},{val:3,count:1,x:2.0},{val:-5,count:2,x:-7.0} ],  numBuckets:4, allBuckets:{count:5,x:0.6},missing:{count:1,x:0.0} } " +  // TODO: should missing exclude "x" because no values were collected?


[03/17] lucene-solr:jira/solr-8668: LUCENE-7791: add tests for index sorting with sparse text fields and norms

Posted by cp...@apache.org.
LUCENE-7791: add tests for index sorting with sparse text fields and norms


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/3316f47b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/3316f47b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/3316f47b

Branch: refs/heads/jira/solr-8668
Commit: 3316f47bcf110851ebf7f70b835027a9769bccd2
Parents: 0404e37
Author: Jim Ferenczi <ji...@apache.org>
Authored: Fri Apr 21 12:01:09 2017 +0200
Committer: Jim Ferenczi <ji...@apache.org>
Committed: Fri Apr 21 12:01:09 2017 +0200

----------------------------------------------------------------------
 .../src/test/org/apache/lucene/index/TestIndexSorting.java   | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/3316f47b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
index ef5d4ef..be3a2af 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
@@ -2411,12 +2411,15 @@ public class TestIndexSorting extends LuceneTestCase {
     Sort indexSort = new Sort(sortField);
     iwc.setIndexSort(indexSort);
     IndexWriter w = new IndexWriter(dir, iwc);
+    Field textField = newTextField("sparse_text", "", Field.Store.NO);
     for (int i = 0; i < 128; i++) {
       Document doc = new Document();
       doc.add(new NumericDocValuesField("dense_int", i));
       if (i < 64) {
         doc.add(new NumericDocValuesField("sparse_int", i));
         doc.add(new BinaryDocValuesField("sparse_binary", new BytesRef(Integer.toString(i))));
+        textField.setStringValue("foo");
+        doc.add(textField);
       }
       w.addDocument(doc);
     }
@@ -2429,6 +2432,7 @@ public class TestIndexSorting extends LuceneTestCase {
     NumericDocValues denseValues = leafReader.getNumericDocValues("dense_int");
     NumericDocValues sparseValues = leafReader.getNumericDocValues("sparse_int");
     BinaryDocValues sparseBinaryValues = leafReader.getBinaryDocValues("sparse_binary");
+    NumericDocValues normsValues = leafReader.getNormValues("sparse_text");
     for(int docID = 0; docID < 128; docID++) {
       assertTrue(denseValues.advanceExact(docID));
       assertEquals(127-docID, (int) denseValues.longValue());
@@ -2436,12 +2440,14 @@ public class TestIndexSorting extends LuceneTestCase {
         assertTrue(denseValues.advanceExact(docID));
         assertTrue(sparseValues.advanceExact(docID));
         assertTrue(sparseBinaryValues.advanceExact(docID));
-        assertEquals(docID, sparseValues.docID());
+        assertTrue(normsValues.advanceExact(docID));
+        assertEquals(124, normsValues.longValue());
         assertEquals(127-docID, (int) sparseValues.longValue());
         assertEquals(new BytesRef(Integer.toString(127-docID)), sparseBinaryValues.binaryValue());
       } else {
         assertFalse(sparseBinaryValues.advanceExact(docID));
         assertFalse(sparseValues.advanceExact(docID));
+        assertFalse(normsValues.advanceExact(docID));
       }
     }
     IOUtils.close(r, w, dir);


[16/17] lucene-solr:jira/solr-8668: Merge branch 'master' into jira/solr-8668

Posted by cp...@apache.org.
Merge branch 'master' into jira/solr-8668


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/7a2a47f5
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/7a2a47f5
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/7a2a47f5

Branch: refs/heads/jira/solr-8668
Commit: 7a2a47f534040d28864cd4fe914de380eac62e24
Parents: 796d5d1 680f4d7
Author: Christine Poerschke <cp...@apache.org>
Authored: Tue Apr 25 14:38:18 2017 +0100
Committer: Christine Poerschke <cp...@apache.org>
Committed: Tue Apr 25 14:38:18 2017 +0100

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   3 +
 .../org/apache/lucene/store/FSDirectory.java    |   2 +-
 .../apache/lucene/index/TestIndexSorting.java   |  82 ++++++++++
 .../org/apache/lucene/store/TestDirectory.java  |  17 ++-
 solr/CHANGES.txt                                |  23 +++
 .../org/apache/solr/cloud/CloudDescriptor.java  |  20 +++
 .../org/apache/solr/core/CoreContainer.java     |  40 ++++-
 .../apache/solr/core/CorePropertiesLocator.java |  18 +--
 .../java/org/apache/solr/core/CoresLocator.java |  10 --
 .../org/apache/solr/handler/StreamHandler.java  |   7 +-
 .../handler/admin/MetricsCollectorHandler.java  |  13 +-
 .../solr/handler/admin/MetricsHandler.java      |   2 +-
 .../handler/component/SpatialHeatmapFacets.java |  41 ++---
 .../reporters/solr/SolrClusterReporter.java     |   1 +
 .../metrics/reporters/solr/SolrReporter.java    |  21 ++-
 .../reporters/solr/SolrShardReporter.java       |   1 +
 .../solr/search/facet/FacetFieldMerger.java     |  14 +-
 .../solr/search/facet/FacetFieldProcessor.java  |  36 +++--
 .../join/BlockJoinFacetComponentSupport.java    |  12 +-
 .../search/join/ScoreJoinQParserPlugin.java     |  11 +-
 .../apache/solr/cloud/SolrCloudExampleTest.java |  81 +++++++++-
 .../org/apache/solr/core/TestCoreContainer.java |   5 -
 .../solr/handler/admin/MetricsHandlerTest.java  |  24 +--
 .../solr/handler/admin/StatsReloadRaceTest.java |  10 +-
 .../reporters/solr/SolrCloudReportersTest.java  |   8 +-
 .../org/apache/solr/schema/DocValuesTest.java   |  72 ---------
 .../solr/schema/TestHalfAndHalfDocValues.java   | 136 +++++++++++++++++
 .../solr/search/facet/TestJsonFacets.java       |   8 +-
 .../search/join/BlockJoinFacetDistribTest.java  |  10 +-
 .../search/join/TestScoreJoinQPNoScore.java     |   7 +-
 .../solr/client/solrj/io/stream/CellStream.java | 152 +++++++++++++++++++
 .../solr/client/solrj/io/stream/GetStream.java  | 117 ++++++++++++++
 .../solr/client/solrj/io/stream/LetStream.java  | 152 +++++++++++++++++++
 .../solr/client/solrj/io/stream/ListStream.java | 145 ++++++++++++++++++
 .../client/solrj/io/stream/StreamContext.java   |   7 +
 .../solrj/io/stream/StreamExpressionTest.java   | 141 +++++++++++++++++
 .../apache/solr/util/ReadOnlyCoresLocator.java  |   6 -
 .../java/org/apache/solr/util/TestHarness.java  |   4 -
 38 files changed, 1245 insertions(+), 214 deletions(-)
----------------------------------------------------------------------



[12/17] lucene-solr:jira/solr-8668: SOLR-10557: Make "compact" format default for /admin/metrics.

Posted by cp...@apache.org.
SOLR-10557: Make "compact" format default for /admin/metrics.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/114a65b3
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/114a65b3
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/114a65b3

Branch: refs/heads/jira/solr-8668
Commit: 114a65b3316654507a434bf90793b5159022d6c7
Parents: e17b987
Author: Andrzej Bialecki <ab...@apache.org>
Authored: Mon Apr 24 22:34:46 2017 +0200
Committer: Andrzej Bialecki <ab...@apache.org>
Committed: Mon Apr 24 22:35:18 2017 +0200

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 ++
 .../handler/admin/MetricsCollectorHandler.java  | 13 ++++++++---
 .../solr/handler/admin/MetricsHandler.java      |  2 +-
 .../reporters/solr/SolrClusterReporter.java     |  1 +
 .../metrics/reporters/solr/SolrReporter.java    | 21 ++++++++++++++---
 .../reporters/solr/SolrShardReporter.java       |  1 +
 .../solr/handler/admin/MetricsHandlerTest.java  | 24 ++++++++++----------
 .../reporters/solr/SolrCloudReportersTest.java  |  8 +++----
 8 files changed, 49 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/114a65b3/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 35fd327..8fbebdb 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -117,6 +117,8 @@ Other Changes
   Add support for selecting specific properties from any compound metric using 'property' parameter to
   /admin/metrics handler. (ab)
 
+* SOLR-10557: Make "compact" format default for /admin/metrics. (ab)
+
 ----------------------
 
 ==================  6.6.0 ==================

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/114a65b3/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
index 8474f55..3d8b6e0 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsCollectorHandler.java
@@ -174,9 +174,15 @@ public class MetricsCollectorHandler extends RequestHandlerBase {
       String labelId = (String)doc.getFieldValue(SolrReporter.LABEL_ID);
       doc.remove(SolrReporter.LABEL_ID);
       doc.forEach(f -> {
-        String key = MetricRegistry.name(labelId, metricName, f.getName());
+        String key;
+        if (doc.size() == 1 && f.getName().equals(MetricUtils.VALUE)) {
+          // only one "value" field - skip the unnecessary field name
+          key = MetricRegistry.name(labelId, metricName);
+        } else {
+          key = MetricRegistry.name(labelId, metricName, f.getName());
+        }
         MetricRegistry registry = metricManager.registry(groupId);
-        AggregateMetric metric = getOrRegister(registry, key, new AggregateMetric());
+        AggregateMetric metric = getOrCreate(registry, key);
         Object o = f.getFirstValue();
         if (o != null) {
           metric.set(reporterId, o);
@@ -187,11 +193,12 @@ public class MetricsCollectorHandler extends RequestHandlerBase {
       });
     }
 
-    private AggregateMetric getOrRegister(MetricRegistry registry, String name, AggregateMetric add) {
+    private AggregateMetric getOrCreate(MetricRegistry registry, String name) {
       AggregateMetric existing = (AggregateMetric)registry.getMetrics().get(name);
       if (existing != null) {
         return existing;
       }
+      AggregateMetric add = new AggregateMetric();
       try {
         registry.register(name, add);
         return add;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/114a65b3/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
index 9dda6ae..11f6821 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
@@ -83,7 +83,7 @@ public class MetricsHandler extends RequestHandlerBase implements PermissionName
       throw new SolrException(SolrException.ErrorCode.INVALID_STATE, "Core container instance not initialized");
     }
 
-    boolean compact = req.getParams().getBool(COMPACT_PARAM, false);
+    boolean compact = req.getParams().getBool(COMPACT_PARAM, true);
     MetricFilter mustMatchFilter = parseMustMatchFilter(req);
     MetricUtils.PropertyFilter propertyFilter = parsePropertyFilter(req);
     List<MetricType> metricTypes = parseMetricTypes(req);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/114a65b3/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
index c437457..c677bea 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrClusterReporter.java
@@ -217,6 +217,7 @@ public class SolrClusterReporter extends SolrMetricReporter {
         .convertDurationsTo(TimeUnit.MILLISECONDS)
         .withHandler(handler)
         .withReporterId(reporterId)
+        .setCompact(true)
         .cloudClient(false) // we want to send reports specifically to a selected leader instance
         .skipAggregateValues(true) // we don't want to transport details of aggregates
         .skipHistograms(true) // we don't want to transport histograms

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/114a65b3/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
index 1923877..8d36cef 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
@@ -126,6 +126,7 @@ public class SolrReporter extends ScheduledReporter {
     private boolean skipHistograms;
     private boolean skipAggregateValues;
     private boolean cloudClient;
+    private boolean compact;
     private SolrParams params;
 
     /**
@@ -146,6 +147,7 @@ public class SolrReporter extends ScheduledReporter {
       this.skipHistograms = false;
       this.skipAggregateValues = false;
       this.cloudClient = false;
+      this.compact = true;
       this.params = null;
     }
 
@@ -170,6 +172,16 @@ public class SolrReporter extends ScheduledReporter {
     }
 
     /**
+     * If true then use "compact" data representation.
+     * @param compact compact representation.
+     * @return {@code this}
+     */
+    public Builder setCompact(boolean compact) {
+      this.compact = compact;
+      return this;
+    }
+
+    /**
      * Histograms are difficult / impossible to aggregate, so it may not be
      * worth to report them.
      * @param skipHistograms when true then skip histograms from reports
@@ -244,7 +256,7 @@ public class SolrReporter extends ScheduledReporter {
      */
     public SolrReporter build(HttpClient client, Supplier<String> urlProvider) {
       return new SolrReporter(client, urlProvider, metricManager, reports, handler, reporterId, rateUnit, durationUnit,
-          params, skipHistograms, skipAggregateValues, cloudClient);
+          params, skipHistograms, skipAggregateValues, cloudClient, compact);
     }
 
   }
@@ -258,6 +270,7 @@ public class SolrReporter extends ScheduledReporter {
   private boolean skipHistograms;
   private boolean skipAggregateValues;
   private boolean cloudClient;
+  private boolean compact;
   private ModifiableSolrParams params;
   private Map<String, Object> metadata;
 
@@ -288,7 +301,8 @@ public class SolrReporter extends ScheduledReporter {
   public SolrReporter(HttpClient httpClient, Supplier<String> urlProvider, SolrMetricManager metricManager,
                       List<Report> metrics, String handler,
                       String reporterId, TimeUnit rateUnit, TimeUnit durationUnit,
-                      SolrParams params, boolean skipHistograms, boolean skipAggregateValues, boolean cloudClient) {
+                      SolrParams params, boolean skipHistograms, boolean skipAggregateValues,
+                      boolean cloudClient, boolean compact) {
     super(null, "solr-reporter", MetricFilter.ALL, rateUnit, durationUnit);
     this.metricManager = metricManager;
     this.urlProvider = urlProvider;
@@ -311,6 +325,7 @@ public class SolrReporter extends ScheduledReporter {
     this.skipHistograms = skipHistograms;
     this.skipAggregateValues = skipAggregateValues;
     this.cloudClient = cloudClient;
+    this.compact = compact;
     this.params = new ModifiableSolrParams();
     this.params.set(REPORTER_ID, reporterId);
     // allow overrides to take precedence
@@ -361,7 +376,7 @@ public class SolrReporter extends ScheduledReporter {
         }
         final String effectiveGroup = group;
         MetricUtils.toSolrInputDocuments(metricManager.registry(registryName), Collections.singletonList(report.filter), MetricFilter.ALL,
-            MetricUtils.PropertyFilter.ALL, skipHistograms, skipAggregateValues, false, metadata, doc -> {
+            MetricUtils.PropertyFilter.ALL, skipHistograms, skipAggregateValues, compact, metadata, doc -> {
               doc.setField(REGISTRY_ID, registryName);
               doc.setField(GROUP_ID, effectiveGroup);
               if (effectiveLabel != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/114a65b3/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
index 0cf14db..6ae84ac 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
@@ -158,6 +158,7 @@ public class SolrShardReporter extends SolrMetricReporter {
         .convertDurationsTo(TimeUnit.MILLISECONDS)
         .withHandler(handler)
         .withReporterId(id)
+        .setCompact(true)
         .cloudClient(false) // we want to send reports specifically to a selected leader instance
         .skipAggregateValues(true) // we don't want to transport details of aggregates
         .skipHistograms(true) // we don't want to transport histograms

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/114a65b3/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
index eb86b1b..402cc25 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
@@ -45,7 +45,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     MetricsHandler handler = new MetricsHandler(h.getCoreContainer());
 
     SolrQueryResponse resp = new SolrQueryResponse();
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json"), resp);
     NamedList values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
@@ -64,7 +64,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertEquals(5, ((Map) nl.get("ADMIN./admin/authorization.clientErrors")).size());
 
     resp = new SolrQueryResponse();
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "group", "jvm,jetty"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "jvm,jetty"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
@@ -74,7 +74,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
 
     resp = new SolrQueryResponse();
     // "collection" works too, because it's a prefix for "collection1"
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "registry", "solr.core.collection,solr.jvm"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "registry", "solr.core.collection,solr.jvm"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
@@ -84,7 +84,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
 
     resp = new SolrQueryResponse();
     // "collection" works too, because it's a prefix for "collection1"
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "registry", "solr.core.collection", "registry", "solr.jvm"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "registry", "solr.core.collection", "registry", "solr.jvm"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
@@ -93,7 +93,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertNotNull(values.get("solr.jvm"));
 
     resp = new SolrQueryResponse();
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "group", "jvm,jetty"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "jvm,jetty"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
@@ -102,7 +102,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertNotNull(values.get("solr.jvm"));
 
     resp = new SolrQueryResponse();
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "group", "jvm", "group", "jetty"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "jvm", "group", "jetty"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
@@ -111,7 +111,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertNotNull(values.get("solr.jvm"));
 
     resp = new SolrQueryResponse();
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "group", "node", "type", "counter"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "node", "type", "counter"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
@@ -121,7 +121,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertNull(values.get("ADMIN./admin/authorization.errors")); // this is a timer node
 
     resp = new SolrQueryResponse();
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "prefix", "CONTAINER.cores,CONTAINER.threadPool"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "prefix", "CONTAINER.cores,CONTAINER.threadPool"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
@@ -134,7 +134,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertNotNull(values.get("CONTAINER.threadPool.coreLoadExecutor.completed"));
 
     resp = new SolrQueryResponse();
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "prefix", "CONTAINER.cores", "regex", "C.*thread.*completed"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "prefix", "CONTAINER.cores", "regex", "C.*thread.*completed"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
@@ -145,7 +145,7 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertNotNull(values.get("CONTAINER.threadPool.coreLoadExecutor.completed"));
 
     resp = new SolrQueryResponse();
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "prefix", "CACHE.core.fieldCache", "property", "entries_count", "compact", "true"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "prefix", "CACHE.core.fieldCache", "property", "entries_count", MetricsHandler.COMPACT_PARAM, "true"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
@@ -157,14 +157,14 @@ public class MetricsHandlerTest extends SolrTestCaseJ4 {
     assertNotNull(m.get("entries_count"));
 
     resp = new SolrQueryResponse();
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "group", "jvm", "prefix", "CONTAINER.cores"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "jvm", "prefix", "CONTAINER.cores"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
     assertEquals(0, values.size());
 
     resp = new SolrQueryResponse();
-    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json", "group", "node", "type", "timer", "prefix", "CONTAINER.cores"), resp);
+    handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "node", "type", "timer", "prefix", "CONTAINER.cores"), resp);
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     SimpleOrderedMap map = (SimpleOrderedMap) values.get("metrics");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/114a65b3/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
index f527a17..df7e642 100644
--- a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java
@@ -119,20 +119,20 @@ public class SolrCloudReportersTest extends SolrCloudTestCase {
         assertEquals(reporters.toString(), 0, reporters.size());
         // verify specific metrics
         Map<String, Metric> metrics = metricManager.registry(registryName).getMetrics();
-        String key = "QUERY./select.requests.count";
+        String key = "QUERY./select.requests";
         assertTrue(key, metrics.containsKey(key));
         assertTrue(key, metrics.get(key) instanceof AggregateMetric);
-        key = "UPDATE./update/json.requests.count";
+        key = "UPDATE./update/json.requests";
         assertTrue(key, metrics.containsKey(key));
         assertTrue(key, metrics.get(key) instanceof AggregateMetric);
       }
       if (metricManager.registryNames().contains("solr.cluster")) {
         clusterRegistries++;
         Map<String,Metric> metrics = metricManager.registry("solr.cluster").getMetrics();
-        String key = "jvm.memory.heap.init.value";
+        String key = "jvm.memory.heap.init";
         assertTrue(key, metrics.containsKey(key));
         assertTrue(key, metrics.get(key) instanceof AggregateMetric);
-        key = "leader.test_collection.shard1.UPDATE./update/json.requests.count.max";
+        key = "leader.test_collection.shard1.UPDATE./update/json.requests.max";
         assertTrue(key, metrics.containsKey(key));
         assertTrue(key, metrics.get(key) instanceof AggregateMetric);
       }
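
For context, with this change /admin/metrics answers in the compact layout
unless the request says otherwise, and the shard/cluster reporters now build
their SolrReporter with setCompact(true). A small client-side sketch of asking
for the older verbose layout explicitly; the base URL, group and parameter
values are placeholders, not part of the patch:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.GenericSolrRequest;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;

public class MetricsCompactExample {
  public static void main(String[] args) throws Exception {
    try (SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
      ModifiableSolrParams params = new ModifiableSolrParams();
      params.set("group", "node");
      params.set("compact", "false"); // the pre-change default; omit for the new compact output
      NamedList<Object> rsp =
          client.request(new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/metrics", params));
      System.out.println(rsp.get("metrics"));
    }
  }
}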


[11/17] lucene-solr:jira/solr-8668: SOLR-10493: Investigate SolrCloudExampleTest failures.

Posted by cp...@apache.org.
SOLR-10493: Investigate SolrCloudExampleTest failures.

(cherry picked from commit 0247acd)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/e17b9877
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/e17b9877
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/e17b9877

Branch: refs/heads/jira/solr-8668
Commit: e17b987734cae654e01d58876e3fc05eea1bb605
Parents: 56e1ad4
Author: Erick Erickson <er...@apache.org>
Authored: Mon Apr 24 12:17:46 2017 -0700
Committer: Erick Erickson <er...@apache.org>
Committed: Mon Apr 24 12:22:11 2017 -0700

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  2 +
 .../org/apache/solr/cloud/CloudDescriptor.java  | 20 +++++
 .../org/apache/solr/core/CoreContainer.java     | 40 ++++++++--
 .../apache/solr/core/CorePropertiesLocator.java | 18 ++---
 .../java/org/apache/solr/core/CoresLocator.java | 10 ---
 .../apache/solr/cloud/SolrCloudExampleTest.java | 81 +++++++++++++++++++-
 .../org/apache/solr/core/TestCoreContainer.java |  5 --
 .../apache/solr/util/ReadOnlyCoresLocator.java  |  6 --
 .../java/org/apache/solr/util/TestHarness.java  |  4 -
 9 files changed, 142 insertions(+), 44 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e17b9877/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 5ba38d9..35fd327 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -226,6 +226,8 @@ Bug Fixes
 * SOLR-5127: Multiple highlight fields and wildcards are now supported e.g. hl.fl=title,text_*
   (Sven-S. Porst, Daniel Debray, Simon Endele, Christine Poerschke)
 
+* SOLR-10493: Investigate SolrCloudExampleTest failures. (Erick Erickson)
+
 Other Changes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e17b9877/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java b/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java
index fdc7b02..719b1d1 100644
--- a/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/CloudDescriptor.java
@@ -21,6 +21,7 @@ import java.util.Map;
 import java.util.Properties;
 
 import com.google.common.base.Strings;
+import org.apache.solr.common.StringUtils;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.util.PropertiesUtil;
@@ -135,4 +136,23 @@ public class CloudDescriptor {
     if(nodeName==null) cd.getPersistableStandardProperties().remove(CoreDescriptor.CORE_NODE_NAME);
     else cd.getPersistableStandardProperties().setProperty(CoreDescriptor.CORE_NODE_NAME, nodeName);
   }
+
+  public void reload(CloudDescriptor reloadFrom) {
+    if (reloadFrom == null) return;
+
+    setShardId(StringUtils.isEmpty(reloadFrom.getShardId()) ? getShardId() : reloadFrom.getShardId());
+    setCollectionName(StringUtils.isEmpty(reloadFrom.getCollectionName()) ? getCollectionName() : reloadFrom.getCollectionName());
+    setRoles(StringUtils.isEmpty(reloadFrom.getRoles()) ? getRoles() : reloadFrom.getRoles());
+    if (reloadFrom.getNumShards() != null) {
+      setNumShards(reloadFrom.getNumShards());
+    }
+    setCoreNodeName(StringUtils.isEmpty(reloadFrom.getCoreNodeName()) ? getCoreNodeName() : reloadFrom.getCoreNodeName());
+    setLeader(reloadFrom.isLeader);
+    setHasRegistered(reloadFrom.hasRegistered);
+    setLastPublished(reloadFrom.getLastPublished());
+
+    for (Map.Entry<String, String> ent : reloadFrom.getParams().entrySet()) {
+      collectionParams.put(ent.getKey(), ent.getValue());
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e17b9877/solr/core/src/java/org/apache/solr/core/CoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index 3c3aaa5..28c1eaf 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -104,6 +104,7 @@ import static org.apache.solr.common.params.CommonParams.CORES_HANDLER_PATH;
 import static org.apache.solr.common.params.CommonParams.INFO_HANDLER_PATH;
 import static org.apache.solr.common.params.CommonParams.METRICS_PATH;
 import static org.apache.solr.common.params.CommonParams.ZK_PATH;
+import static org.apache.solr.core.CorePropertiesLocator.PROPERTIES_FILENAME;
 import static org.apache.solr.security.AuthenticationPlugin.AUTHENTICATION_PLUGIN_PROP;
 
 /**
@@ -1130,7 +1131,37 @@ public class CoreContainer {
   }
 
 
-  // ---------------- Core name related methods --------------- 
+  // ---------------- Core name related methods ---------------
+
+  private CoreDescriptor reloadCoreDescriptor(CoreDescriptor oldDesc) {
+    if (oldDesc == null) {
+      return null;
+    }
+
+    CorePropertiesLocator cpl = new CorePropertiesLocator(null);
+    CoreDescriptor ret = cpl.buildCoreDescriptor(oldDesc.getInstanceDir().resolve(PROPERTIES_FILENAME), this);
+
+    // Ok, this little jewel is all because we still create core descriptors on the fly from lists of properties
+    // in tests particularly. Theoretically, there should be _no_ way to create a CoreDescriptor in the new world
+    // of core discovery without writing the core.properties file out first.
+    //
+    // TODO: remove core.properties from the conf directory in test files, it's in a bad place there anyway.
+    if (ret == null) {
+      oldDesc.loadExtraProperties(); // there may be changes to extra properties that we need to pick up.
+      return oldDesc;
+      
+    }
+    // The CloudDescriptor bit here is created in a very convoluted way, requiring access to private methods
+    // in ZkController. When reloading, this behavior is identical to what used to happen where a copy of the old
+    // CoreDescriptor was just re-used.
+    
+    if (ret.getCloudDescriptor() != null) {
+      ret.getCloudDescriptor().reload(oldDesc.getCloudDescriptor());
+    }
+
+    return ret;
+  }
+
   /**
    * Recreates a SolrCore.
    * While the new core is loading, requests will continue to be dispatched to
@@ -1141,11 +1172,10 @@ public class CoreContainer {
   public void reload(String name) {
     SolrCore core = solrCores.getCoreFromAnyList(name, false);
     if (core != null) {
+      
       // The underlying core properties files may have changed, we don't really know. So we have a (perhaps) stale
-      // CoreDescriptor we need to reload it if it's out there. 
-      CorePropertiesLocator cpl = new CorePropertiesLocator(null);
-      CoreDescriptor cd = cpl.reload(this, core.getCoreDescriptor());
-      if (cd == null) cd = core.getCoreDescriptor();
+      // CoreDescriptor and we need to reload it from the disk files
+      CoreDescriptor cd = reloadCoreDescriptor(core.getCoreDescriptor());
       solrCores.addCoreDescriptor(cd);
       try {
         solrCores.waitAddPendingCoreOps(cd.getName());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e17b9877/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
index 385d11b..e942c9b 100644
--- a/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
+++ b/solr/core/src/java/org/apache/solr/core/CorePropertiesLocator.java
@@ -134,8 +134,10 @@ public class CorePropertiesLocator implements CoresLocator {
         public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
           if (file.getFileName().toString().equals(PROPERTIES_FILENAME)) {
             CoreDescriptor cd = buildCoreDescriptor(file, cc);
-            logger.debug("Found core {} in {}", cd.getName(), cd.getInstanceDir());
-            cds.add(cd);
+            if (cd != null) {
+              logger.debug("Found core {} in {}", cd.getName(), cd.getInstanceDir());
+              cds.add(cd);
+            }
             return FileVisitResult.SKIP_SIBLINGS;
           }
           return FileVisitResult.CONTINUE;
@@ -163,14 +165,6 @@ public class CorePropertiesLocator implements CoresLocator {
     return cds;
   }
 
-  @Override
-  public CoreDescriptor reload(CoreContainer cc, CoreDescriptor cd) {
-    if (cd == null) return null;
-    
-    Path coreProps = cd.getInstanceDir().resolve(CoreDescriptor.DEFAULT_EXTERNAL_PROPERTIES_FILE);
-    return buildCoreDescriptor(coreProps, cc);
-  }
-
   protected CoreDescriptor buildCoreDescriptor(Path propertiesFile, CoreContainer cc) {
 
     Path instanceDir = propertiesFile.getParent();
@@ -182,7 +176,9 @@ public class CorePropertiesLocator implements CoresLocator {
       for (String key : coreProperties.stringPropertyNames()) {
         propMap.put(key, coreProperties.getProperty(key));
       }
-      return new CoreDescriptor(name, instanceDir, propMap, cc.getContainerProperties(), cc.isZooKeeperAware());
+      CoreDescriptor ret = new CoreDescriptor(name, instanceDir, propMap, cc.getContainerProperties(), cc.isZooKeeperAware());
+      ret.loadExtraProperties();
+      return ret;
     }
     catch (IOException e) {
       logger.error("Couldn't load core descriptor from {}:{}", propertiesFile, e.toString());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e17b9877/solr/core/src/java/org/apache/solr/core/CoresLocator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/CoresLocator.java b/solr/core/src/java/org/apache/solr/core/CoresLocator.java
index beaa690..52927f1 100644
--- a/solr/core/src/java/org/apache/solr/core/CoresLocator.java
+++ b/solr/core/src/java/org/apache/solr/core/CoresLocator.java
@@ -68,14 +68,4 @@ public interface CoresLocator {
    * @return a list of all CoreDescriptors found
    */
   public List<CoreDescriptor> discover(CoreContainer cc);
-
-  /**
-   * reload an existing CoreDescriptor, that is read it from disk.
-   * 
-   * @param cc the CoreContainer
-   * @param cd the old CoreDescriptor. If null, this is a no-op
-   * @return the reloaded coreDescriptor or null          
-   */
-  public CoreDescriptor reload(CoreContainer cc, CoreDescriptor cd);
-
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e17b9877/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java
index 9d415bc..8c49f6b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SolrCloudExampleTest.java
@@ -18,24 +18,40 @@ package org.apache.solr.cloud;
 
 import java.io.File;
 import java.io.FilenameFilter;
+import java.io.StringReader;
 import java.lang.invoke.MethodHandles;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
 
 import org.apache.commons.cli.CommandLine;
+import org.apache.http.HttpEntity;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.util.EntityUtils;
 import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.cloud.DocCollection;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
+import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.util.ExternalPaths;
 import org.apache.solr.util.SolrCLI;
 import org.junit.Test;
+import org.noggit.JSONParser;
+import org.noggit.ObjectBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.util.Arrays.asList;
+import static org.apache.solr.common.util.Utils.getObjectByPath;
+
 /**
  * Emulates bin/solr -e cloud -noprompt; bin/post -c gettingstarted example/exampledocs/*.xml;
  * this test is useful for catching regressions in indexing the example docs in collections that
@@ -129,10 +145,16 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase {
       cloudClient.request(req);
     }
     cloudClient.commit();
-    Thread.sleep(1000);
 
-    QueryResponse qr = cloudClient.query(new SolrQuery("*:*"));
-    int numFound = (int)qr.getResults().getNumFound();
+    int numFound = 0;
+
+    // give the update a chance to take effect.
+    for (int idx = 0; idx < 100; ++idx) {
+      QueryResponse qr = cloudClient.query(new SolrQuery("*:*"));
+      numFound = (int) qr.getResults().getNumFound();
+      if (numFound == expectedXmlDocCount) break;
+      Thread.sleep(100);
+    }
     assertEquals("*:* found unexpected number of documents", expectedXmlDocCount, numFound);
 
     log.info("Updating Config for " + testCollectionName);
@@ -192,6 +214,9 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase {
         "-value", maxTime.toString(),
         "-solrUrl", solrUrl
     };
+
+    Map<String, Long> startTimes = getSoftAutocommitInterval(testCollectionName);
+
     SolrCLI.ConfigTool tool = new SolrCLI.ConfigTool();
     CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args);
     log.info("Sending set-property '" + prop + "'=" + maxTime + " to SolrCLI.ConfigTool.");
@@ -201,5 +226,55 @@ public class SolrCloudExampleTest extends AbstractFullDistribZkTestBase {
     maxTimeFromConfig = SolrCLI.atPath("/config/updateHandler/autoSoftCommit/maxTime", configJson);
     assertNotNull(maxTimeFromConfig);
     assertEquals(maxTime, maxTimeFromConfig);
+
+    log.info("live_nodes_count :  " + cloudClient.getZkStateReader().getClusterState().getLiveNodes());
+
+    // Since it takes some time for this command to complete we need to make sure all the reloads for
+    // all the cores have been done.
+    boolean allGood = false;
+    Map<String, Long> curSoftCommitInterval = null;
+    for (int idx = 0; idx < 600 && allGood == false; ++idx) {
+      curSoftCommitInterval = getSoftAutocommitInterval(testCollectionName);
+      if (curSoftCommitInterval.size() > 0 && curSoftCommitInterval.size() == startTimes.size()) { // no point in even trying if they're not the same size!
+        allGood = true;
+        for (Map.Entry<String, Long> currEntry : curSoftCommitInterval.entrySet()) {
+          if (currEntry.getValue().equals(maxTime) == false) {
+            allGood = false;
+          }
+        }
+      }
+      if (allGood == false) {
+        Thread.sleep(100);
+      }
+    }
+    assertTrue("All cores should have been reloaded within 60 seconds!!!", allGood);
+  }
+
+  // Collect all of the autoSoftCommit intervals.
+  private Map<String, Long> getSoftAutocommitInterval(String collection) throws Exception {
+    Map<String, Long> ret = new HashMap<>();
+    DocCollection coll = cloudClient.getZkStateReader().getClusterState().getCollection(collection);
+    for (Slice slice : coll.getActiveSlices()) {
+      for (Replica replica : slice.getReplicas()) {
+        String uri = "" + replica.get(ZkStateReader.BASE_URL_PROP) + "/" + replica.get(ZkStateReader.CORE_NAME_PROP) + "/config?wt=json";
+        Map respMap = getAsMap(cloudClient, uri);
+        Long maxTime = (Long) (getObjectByPath(respMap, true, asList("config", "updateHandler", "autoSoftCommit", "maxTime")));
+        ret.put(replica.getCoreName(), maxTime);
+      }
+    }
+    return ret;
   }
+
+  private Map getAsMap(CloudSolrClient cloudClient, String uri) throws Exception {
+    HttpGet get = new HttpGet(uri);
+    HttpEntity entity = null;
+    try {
+      entity = cloudClient.getLbClient().getHttpClient().execute(get).getEntity();
+      String response = EntityUtils.toString(entity, StandardCharsets.UTF_8);
+      return (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
+    } finally {
+      EntityUtils.consumeQuietly(entity);
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e17b9877/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
index 91bbabb..2949e2e 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
@@ -386,11 +386,6 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
       return cores;
     }
 
-    @Override
-    public CoreDescriptor reload(CoreContainer cc, CoreDescriptor cd) {
-      return cd;
-    }
-
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e17b9877/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java b/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java
index 3ad3ce2..3d11ff7 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/ReadOnlyCoresLocator.java
@@ -47,10 +47,4 @@ public abstract class ReadOnlyCoresLocator implements CoresLocator {
     // no-op
   }
 
-  @Override
-  public CoreDescriptor reload(CoreContainer cc, CoreDescriptor cd) {
-    return null; // no-op
-  }
-
-
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/e17b9877/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
----------------------------------------------------------------------
diff --git a/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java b/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
index b8e1899..cefd75f 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
@@ -231,10 +231,6 @@ public class TestHarness extends BaseTestHarness {
           CoreDescriptor.CORE_SHARD, System.getProperty("shard", "shard1")));
     }
 
-    @Override
-    public CoreDescriptor reload(CoreContainer cc, CoreDescriptor cd) {
-      return cd;
-    }
   }
   
   public CoreContainer getCoreContainer() {
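
For context, the heart of the test fix above is replacing fixed Thread.sleep()
calls with bounded polling: query, check, and retry until the expected state
shows up or a deadline passes. A generic sketch of that pattern (the helper
name and timeouts are illustrative only, not part of the patch):

import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

public final class WaitFor {

  /** Polls the condition every pollMs until it returns true or timeoutMs elapses. */
  public static boolean waitFor(Supplier<Boolean> condition, long timeoutMs, long pollMs)
      throws InterruptedException {
    final long deadline = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeoutMs);
    while (System.nanoTime() - deadline < 0) {
      if (condition.get()) {
        return true;
      }
      Thread.sleep(pollMs);
    }
    return condition.get(); // one last check at the deadline
  }
}

// e.g. waitFor(() -> currentSoftCommitIntervalsMatch(), 60_000, 100)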


[05/17] lucene-solr:jira/solr-8668: SOLR-10551: Add list and cell Streaming Expressions

Posted by cp...@apache.org.
SOLR-10551: Add list and cell Streaming Expressions


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f6af8d09
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f6af8d09
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f6af8d09

Branch: refs/heads/jira/solr-8668
Commit: f6af8d099932ca3cf611878197174b3f70e5cc85
Parents: 4cd83ea
Author: Joel Bernstein <jb...@apache.org>
Authored: Sat Apr 22 16:38:38 2017 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Sat Apr 22 16:39:02 2017 -0400

----------------------------------------------------------------------
 .../org/apache/solr/handler/StreamHandler.java  |   4 +-
 .../solr/client/solrj/io/stream/CellStream.java | 147 +++++++++++++++++++
 .../solr/client/solrj/io/stream/ListStream.java | 145 ++++++++++++++++++
 .../solrj/io/stream/StreamExpressionTest.java   |  55 +++++++
 4 files changed, 350 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f6af8d09/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
index 515a90b..155933d 100644
--- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java
@@ -161,8 +161,10 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware,
          .withFunctionName("calc", CalculatorStream.class)
       .withFunctionName("eval",EvalStream.class)
       .withFunctionName("echo", EchoStream.class)
+      .withFunctionName("cell", CellStream.class)
+      .withFunctionName("list", ListStream.class)
 
-             // metrics
+      // metrics
          .withFunctionName("min", MinMetric.class)
       .withFunctionName("max", MaxMetric.class)
       .withFunctionName("avg", MeanMetric.class)

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f6af8d09/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java
new file mode 100644
index 0000000..1696a1a
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.comp.StreamComparator;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
+import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class CellStream extends TupleStream implements Expressible {
+
+  private static final long serialVersionUID = 1;
+  private TupleStream stream;
+  private String name;
+  private Tuple tuple;
+  private Tuple EOFTuple;
+
+  public CellStream(String name, TupleStream stream) throws IOException {
+    init(name, stream);
+  }
+
+  public CellStream(StreamExpression expression, StreamFactory factory) throws IOException {
+    String name = factory.getValueOperand(expression, 0);
+    List<StreamExpression> streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class);
+
+    if(streamExpressions.size() != 1){
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting 1 stream but found %d",expression, streamExpressions.size()));
+    }
+
+    TupleStream tupleStream = factory.constructStream(streamExpressions.get(0));
+    init(name, tupleStream);
+  }
+
+  private void init(String name, TupleStream tupleStream) {
+    this.name = name;
+    this.stream = tupleStream;
+  }
+
+  @Override
+  public StreamExpression toExpression(StreamFactory factory) throws IOException{
+    return toExpression(factory, true);
+  }
+
+  private StreamExpression toExpression(StreamFactory factory, boolean includeStreams) throws IOException {
+    // function name
+    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+    if(includeStreams) {
+      expression.addParameter(((Expressible)stream).toExpression(factory));
+    }
+    return expression;
+  }
+
+  @Override
+  public Explanation toExplanation(StreamFactory factory) throws IOException {
+
+    StreamExplanation explanation = new StreamExplanation(getStreamNodeId().toString());
+    explanation.setFunctionName(factory.getFunctionName(this.getClass()));
+    explanation.setImplementingClass(this.getClass().getName());
+    explanation.setExpressionType(ExpressionType.STREAM_DECORATOR);
+    explanation.setExpression(toExpression(factory, false).toString());
+    explanation.addChild(stream.toExplanation(factory));
+
+    return explanation;
+  }
+
+  public void setStreamContext(StreamContext context) {
+    this.stream.setStreamContext(context);
+  }
+
+  public List<TupleStream> children() {
+    List<TupleStream> l =  new ArrayList<TupleStream>();
+    l.add(stream);
+
+    return l;
+  }
+
+  public Tuple read() throws IOException {
+    if(tuple.EOF) {
+      return tuple;
+    } else {
+      Tuple t = tuple;
+      tuple = EOFTuple;
+      return t;
+    }
+  }
+
+  public void close() throws IOException {
+  }
+
+  public void open() throws IOException {
+    try {
+      stream.open();
+      List<Tuple> list = new ArrayList();
+      while(true) {
+        Tuple tuple = stream.read();
+        if(tuple.EOF) {
+          EOFTuple = tuple;
+          break;
+        } else {
+          list.add(tuple);
+        }
+      }
+
+      Map map = new HashMap();
+      map.put(name, list);
+      tuple = new Tuple(map);
+    } finally {
+      stream.close();
+    }
+  }
+
+  /** Return the stream sort - ie, the order in which records are returned */
+  public StreamComparator getStreamSort(){
+    return null;
+  }
+
+  public int getCost() {
+    return 0;
+  }
+
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f6af8d09/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java
new file mode 100644
index 0000000..e295a58
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java
@@ -0,0 +1,145 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.comp.StreamComparator;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation;
+import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
+import org.apache.solr.client.solrj.io.stream.expr.Expressible;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExplanation;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class ListStream extends TupleStream implements Expressible {
+
+  private static final long serialVersionUID = 1;
+  private TupleStream[] streams;
+  private TupleStream currentStream;
+  private int streamIndex;
+
+  public ListStream(TupleStream... streams) throws IOException {
+    init(streams);
+  }
+
+  public ListStream(StreamExpression expression, StreamFactory factory) throws IOException {
+    List<StreamExpression> streamExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, TupleStream.class);
+    TupleStream[] streams = new TupleStream[streamExpressions.size()];
+    for(int idx = 0; idx < streamExpressions.size(); ++idx){
+      streams[idx] = factory.constructStream(streamExpressions.get(idx));
+    }
+
+    init(streams);
+  }
+
+  private void init(TupleStream ... tupleStreams) {
+    this.streams = tupleStreams;
+  }
+
+  @Override
+  public StreamExpression toExpression(StreamFactory factory) throws IOException{
+    return toExpression(factory, true);
+  }
+
+  private StreamExpression toExpression(StreamFactory factory, boolean includeStreams) throws IOException {
+    // function name
+    StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+    if(includeStreams) {
+      for(TupleStream stream : streams) {
+        expression.addParameter(((Expressible)stream).toExpression(factory));
+      }
+    }
+    return expression;
+  }
+
+  @Override
+  public Explanation toExplanation(StreamFactory factory) throws IOException {
+
+    StreamExplanation explanation = new StreamExplanation(getStreamNodeId().toString());
+    explanation.setFunctionName(factory.getFunctionName(this.getClass()));
+    explanation.setImplementingClass(this.getClass().getName());
+    explanation.setExpressionType(ExpressionType.STREAM_DECORATOR);
+    explanation.setExpression(toExpression(factory, false).toString());
+    for(TupleStream stream : streams) {
+      explanation.addChild(stream.toExplanation(factory));
+    }
+
+    return explanation;
+  }
+
+  public void setStreamContext(StreamContext context) {
+    for(TupleStream stream : streams) {
+      stream.setStreamContext(context);
+    }
+  }
+
+  public List<TupleStream> children() {
+    List<TupleStream> l =  new ArrayList<TupleStream>();
+    for(TupleStream stream : streams) {
+      l.add(stream);
+    }
+    return l;
+  }
+
+  public Tuple read() throws IOException {
+    while(true) {
+      if (currentStream == null) {
+        if (streamIndex < streams.length) {
+          currentStream = streams[streamIndex];
+          currentStream.open();
+        } else {
+          HashMap map = new HashMap();
+          map.put("EOF", true);
+          return new Tuple(map);
+        }
+      }
+
+      Tuple tuple = currentStream.read();
+      if (tuple.EOF) {
+        currentStream.close();
+        currentStream = null;
+        ++streamIndex;
+      } else {
+        return tuple;
+      }
+    }
+  }
+
+  public void close() throws IOException {
+  }
+
+  public void open() throws IOException {
+  }
+
+  /** Return the stream sort - ie, the order in which records are returned */
+  public StreamComparator getStreamSort(){
+    return null;
+  }
+
+  public int getCost() {
+    return 0;
+  }
+
+
+}
\ No newline at end of file
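
ListStream walks its child streams strictly in order: read() lazily opens the next child, forwards its tuples, and emits a single EOF tuple once every child is exhausted. Each child is closed inside read() as it hits EOF, while ListStream.close() itself is a no-op. A minimal sketch of draining any such stream through the TupleStream contract used above (the helper class name is illustrative):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.stream.TupleStream;

    public class TupleStreams {
      // Drain a TupleStream (for example the ListStream above) into memory,
      // stopping at the EOF marker tuple and always closing the stream.
      public static List<Tuple> drain(TupleStream stream) throws IOException {
        List<Tuple> tuples = new ArrayList<>();
        try {
          stream.open();
          for (Tuple tuple = stream.read(); !tuple.EOF; tuple = stream.read()) {
            tuples.add(tuple);
          }
        } finally {
          stream.close();
        }
        return tuples;
      }
    }

Applied to the list(...) expressions exercised in the test below, drain(...) returns the children's results concatenated in declaration order.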

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f6af8d09/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index ce0a4ce..2505639 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -242,6 +242,9 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     }
   }
 
+
+
+
   @Test
   public void testCloudSolrStreamWithZkHost() throws Exception {
 
@@ -5035,6 +5038,58 @@ public class StreamExpressionTest extends SolrCloudTestCase {
   }
 
   @Test
+  public void testListStream() throws Exception {
+    UpdateRequest updateRequest = new UpdateRequest();
+    updateRequest.add(id, "hello", "test_t", "l b c d c");
+    updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+
+    String expr = "search("+COLLECTIONORALIAS+", q=\"*:*\", fl=id, sort=\"id desc\")";
+    String cat = "list("+expr+","+expr+")";
+    ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", cat);
+    paramsLoc.set("qt", "/stream");
+
+    String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS;
+    TupleStream solrStream = new SolrStream(url, paramsLoc);
+
+    StreamContext context = new StreamContext();
+    solrStream.setStreamContext(context);
+    List<Tuple> tuples = getTuples(solrStream);
+    assertTrue(tuples.size() == 2);
+    String s = (String)tuples.get(0).get("id");
+    assertTrue(s.equals("hello"));
+    s = (String)tuples.get(1).get("id");
+    assertTrue(s.equals("hello"));
+
+  }
+
+  @Test
+  public void testCellStream() throws Exception {
+    UpdateRequest updateRequest = new UpdateRequest();
+    updateRequest.add(id, "hello", "test_t", "l b c d c");
+    updateRequest.add(id, "hello1", "test_t", "l b c d c");
+
+    updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
+
+    String expr = "search("+COLLECTIONORALIAS+", q=\"*:*\", fl=id, sort=\"id desc\")";
+    String cat = "cell(results,"+expr+")";
+    ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", cat);
+    paramsLoc.set("qt", "/stream");
+
+    String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS;
+    TupleStream solrStream = new SolrStream(url, paramsLoc);
+
+    StreamContext context = new StreamContext();
+    solrStream.setStreamContext(context);
+    List<Tuple> tuples = getTuples(solrStream);
+    assertTrue(tuples.size() == 1);
+    List<Map> results  = (List<Map>)tuples.get(0).get("results");
+    assertTrue(results.get(0).get("id").equals("hello1"));
+    assertTrue(results.get(1).get("id").equals("hello"));
+  }
+
+  @Test
   public void testConvertEvaluator() throws Exception {
 
     UpdateRequest updateRequest = new UpdateRequest();


[08/17] lucene-solr:jira/solr-8668: don't allow ExtrasFS for this test case

Posted by cp...@apache.org.
don't allow ExtrasFS for this test case


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/487e0852
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/487e0852
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/487e0852

Branch: refs/heads/jira/solr-8668
Commit: 487e085264de240c464d0a74edf0d4db26d2cffa
Parents: a07ac63
Author: Mike McCandless <mi...@apache.org>
Authored: Sat Apr 22 18:45:27 2017 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Sat Apr 22 18:45:27 2017 -0400

----------------------------------------------------------------------
 lucene/core/src/test/org/apache/lucene/store/TestDirectory.java | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/487e0852/lucene/core/src/test/org/apache/lucene/store/TestDirectory.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/store/TestDirectory.java b/lucene/core/src/test/org/apache/lucene/store/TestDirectory.java
index d3489ad..23c1bcd 100644
--- a/lucene/core/src/test/org/apache/lucene/store/TestDirectory.java
+++ b/lucene/core/src/test/org/apache/lucene/store/TestDirectory.java
@@ -14,8 +14,8 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.lucene.store;
 
+package org.apache.lucene.store;
 
 import java.io.IOException;
 import java.nio.file.Files;
@@ -26,6 +26,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.lucene.mockfile.ExtrasFS;
 import org.apache.lucene.util.LuceneTestCase;
 
 public class TestDirectory extends LuceneTestCase {
@@ -142,6 +143,7 @@ public class TestDirectory extends LuceneTestCase {
 
   public void testListAll() throws Throwable {
     Path dir = createTempDir("testdir");
+    assumeFalse("this test does not expect extra files", dir.getFileSystem().provider() instanceof ExtrasFS);
     Path file1 = Files.createFile(dir.resolve("tempfile1"));
     Path file2 = Files.createFile(dir.resolve("tempfile2"));
     Set<String> files = new HashSet<>(Arrays.asList(FSDirectory.listAll(dir)));
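
ExtrasFS is one of the randomized mock filesystems the test framework may wrap around a temp directory, and it can introduce files the test never created, which breaks any assertion on an exact listing. A hedged sketch of the same guard in another, hypothetical test (only the createTempDir and assumeFalse calls from the diff above are reused):

    import java.nio.file.Path;

    import org.apache.lucene.mockfile.ExtrasFS;
    import org.apache.lucene.util.LuceneTestCase;

    public class TestExactListing extends LuceneTestCase {
      public void testExactFileCount() throws Exception {
        Path dir = createTempDir("exact-listing");
        // Skip when the randomized filesystem may create files we did not ask for.
        assumeFalse("this test does not expect extra files",
            dir.getFileSystem().provider() instanceof ExtrasFS);
        // ... assertions that depend on an exact file count go here ...
      }
    }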


[10/17] lucene-solr:jira/solr-8668: SOLR-10047: Move test into its own test class and force use of NoMergePolicy to fix test failures

Posted by cp...@apache.org.
SOLR-10047: Move test into its own test class and force use of NoMergePolicy to fix test failures

This closes #195


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/56e1ad48
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/56e1ad48
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/56e1ad48

Branch: refs/heads/jira/solr-8668
Commit: 56e1ad484a2c7431932a95e442bc1a584f60698e
Parents: c09d82e
Author: Shalin Shekhar Mangar <sh...@apache.org>
Authored: Tue Apr 25 00:26:21 2017 +0530
Committer: Shalin Shekhar Mangar <sh...@apache.org>
Committed: Tue Apr 25 00:26:21 2017 +0530

----------------------------------------------------------------------
 .../org/apache/solr/schema/DocValuesTest.java   |  72 ----------
 .../solr/schema/TestHalfAndHalfDocValues.java   | 136 +++++++++++++++++++
 2 files changed, 136 insertions(+), 72 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/56e1ad48/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
index 2d8afee..cf43a68 100644
--- a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java
@@ -25,14 +25,10 @@ import java.util.List;
 import java.util.function.Function;
 import java.util.function.Supplier;
 
-import org.apache.lucene.document.Document;
-import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.FieldInfos;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.queries.function.FunctionValues;
@@ -155,74 +151,6 @@ public class DocValuesTest extends SolrTestCaseJ4 {
     }
   }
 
-
-  public void testHalfAndHalfDocValues() throws Exception {
-    // Insert two docs without docvalues
-    String fieldname = "string_add_dv_later";
-    assertU(adoc("id", "3", fieldname, "c"));
-    assertU(commit());
-    assertU(adoc("id", "1", fieldname, "a"));
-    assertU(commit());
-    
-   
-    try (SolrCore core = h.getCoreInc()) {
-        assertFalse(core.getLatestSchema().getField(fieldname).hasDocValues());
-      // Add docvalues to the field type
-      IndexSchema schema = core.getLatestSchema();
-      SchemaField oldField = schema.getField(fieldname);
-      int newProperties = oldField.getProperties() | SchemaField.DOC_VALUES;
-      
-      SchemaField sf = new SchemaField( fieldname, oldField.getType(), newProperties, null);
-      schema.getFields().put( fieldname, sf );
-      
-      // Insert a new doc with docvalues
-      assertU(adoc("id", "2", fieldname, "b"));
-      assertU(commit());
-    
-    
-      // Check there are a mix of segments with and without docvalues
-      final RefCounted<SolrIndexSearcher> searcherRef = core.openNewSearcher(true, true);
-      final SolrIndexSearcher searcher = searcherRef.get();
-      try {
-        final DirectoryReader topReader = searcher.getRawReader();
-
-        //Assert no merges
-        
-        assertEquals(3, topReader.numDocs());
-        assertEquals(3, topReader.leaves().size());
-        
-        final FieldInfos infos = MultiFields.getMergedFieldInfos(topReader);
-        //The global field type should have docValues because a document with dvs was added
-        assertEquals(DocValuesType.SORTED, infos.fieldInfo(fieldname).getDocValuesType());
-        
-        for(LeafReaderContext ctx: topReader.leaves()) {
-          LeafReader r = ctx.reader();
-          //Make sure there were no merges
-          assertEquals(1, r.numDocs());
-          Document doc = r.document(0);
-          String id = doc.getField("id").stringValue();
-          
-          if(id.equals("1") || id.equals("3")) {
-            assertEquals(DocValuesType.NONE, r.getFieldInfos().fieldInfo(fieldname).getDocValuesType());
-          } else {
-            assertEquals(DocValuesType.SORTED, r.getFieldInfos().fieldInfo(fieldname).getDocValuesType());
-          }
-          
-        }
-      } finally {
-        searcherRef.decref();
-      }
-    }
-    
-    // Assert sort order is correct
-    assertQ(req("q", "string_add_dv_later:*", "sort", "string_add_dv_later asc"),
-        "//*[@numFound='3']",
-        "//result/doc[1]/int[@name='id'][.=1]",
-        "//result/doc[2]/int[@name='id'][.=2]",
-        "//result/doc[3]/int[@name='id'][.=3]"
-    );
-  }
-
   private void tstToObj(SchemaField sf, Object o) {
     List<IndexableField> fields = sf.createFields(o);
     for (IndexableField field : fields) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/56e1ad48/solr/core/src/test/org/apache/solr/schema/TestHalfAndHalfDocValues.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/TestHalfAndHalfDocValues.java b/solr/core/src/test/org/apache/solr/schema/TestHalfAndHalfDocValues.java
new file mode 100644
index 0000000..b0c3956
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/schema/TestHalfAndHalfDocValues.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.schema;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfos;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.MultiFields;
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.index.NoMergePolicyFactory;
+import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.util.RefCounted;
+import org.junit.BeforeClass;
+
+/**
+ * Added in SOLR-10047
+ */
+public class TestHalfAndHalfDocValues extends SolrTestCaseJ4 {
+
+  @BeforeClass
+  public static void beforeTests() throws Exception {
+    // we need consistent segments that aren't merged because we want to have
+    // segments with and without docvalues
+    systemSetPropertySolrTestsMergePolicyFactory(NoMergePolicyFactory.class.getName());
+
+    // HACK: Don't use a RandomMergePolicy, but only use the mergePolicyFactory that we've just set
+    System.setProperty(SYSTEM_PROPERTY_SOLR_TESTS_USEMERGEPOLICYFACTORY, "true");
+    System.setProperty(SYSTEM_PROPERTY_SOLR_TESTS_USEMERGEPOLICY, "false");
+
+    initCore("solrconfig-basic.xml", "schema-docValues.xml");
+
+    // sanity check our schema meets our expectations
+    final IndexSchema schema = h.getCore().getLatestSchema();
+    for (String f : new String[]{"floatdv", "intdv", "doubledv", "longdv", "datedv", "stringdv", "booldv"}) {
+      final SchemaField sf = schema.getField(f);
+      assertFalse(f + " is multiValued, test is useless, who changed the schema?",
+          sf.multiValued());
+      assertFalse(f + " is indexed, test is useless, who changed the schema?",
+          sf.indexed());
+      assertTrue(f + " has no docValues, test is useless, who changed the schema?",
+          sf.hasDocValues());
+    }
+  }
+
+  public void setUp() throws Exception {
+    super.setUp();
+    assertU(delQ("*:*"));
+  }
+
+  public void testHalfAndHalfDocValues() throws Exception {
+    // Insert two docs without docvalues
+    String fieldname = "string_add_dv_later";
+    assertU(adoc("id", "3", fieldname, "c"));
+    assertU(commit());
+    assertU(adoc("id", "1", fieldname, "a"));
+    assertU(commit());
+
+
+    try (SolrCore core = h.getCoreInc()) {
+      assertFalse(core.getLatestSchema().getField(fieldname).hasDocValues());
+      // Add docvalues to the field type
+      IndexSchema schema = core.getLatestSchema();
+      SchemaField oldField = schema.getField(fieldname);
+      int newProperties = oldField.getProperties() | SchemaField.DOC_VALUES;
+
+      SchemaField sf = new SchemaField(fieldname, oldField.getType(), newProperties, null);
+      schema.getFields().put(fieldname, sf);
+
+      // Insert a new doc with docvalues
+      assertU(adoc("id", "2", fieldname, "b"));
+      assertU(commit());
+
+
+      // Check there are a mix of segments with and without docvalues
+      final RefCounted<SolrIndexSearcher> searcherRef = core.openNewSearcher(true, true);
+      final SolrIndexSearcher searcher = searcherRef.get();
+      try {
+        final DirectoryReader topReader = searcher.getRawReader();
+
+        //Assert no merges
+
+        assertEquals(3, topReader.numDocs());
+        assertEquals(3, topReader.leaves().size());
+
+        final FieldInfos infos = MultiFields.getMergedFieldInfos(topReader);
+        //The global field type should have docValues because a document with dvs was added
+        assertEquals(DocValuesType.SORTED, infos.fieldInfo(fieldname).getDocValuesType());
+
+        for (LeafReaderContext ctx : topReader.leaves()) {
+          LeafReader r = ctx.reader();
+          //Make sure there were no merges
+          assertEquals(1, r.numDocs());
+          Document doc = r.document(0);
+          String id = doc.getField("id").stringValue();
+
+          if (id.equals("1") || id.equals("3")) {
+            assertEquals(DocValuesType.NONE, r.getFieldInfos().fieldInfo(fieldname).getDocValuesType());
+          } else {
+            assertEquals(DocValuesType.SORTED, r.getFieldInfos().fieldInfo(fieldname).getDocValuesType());
+          }
+
+        }
+      } finally {
+        searcherRef.decref();
+      }
+    }
+
+    // Assert sort order is correct
+    assertQ(req("q", "string_add_dv_later:*", "sort", "string_add_dv_later asc"),
+        "//*[@numFound='3']",
+        "//result/doc[1]/int[@name='id'][.=1]",
+        "//result/doc[2]/int[@name='id'][.=2]",
+        "//result/doc[3]/int[@name='id'][.=3]"
+    );
+  }
+
+}


[04/17] lucene-solr:jira/solr-8668: LUCENE-7797: the static FSDirectory.listAll was always returning an empty array

Posted by cp...@apache.org.
LUCENE-7797: the static FSDirectory.listAll was always returning an empty array


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4cd83ea2
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4cd83ea2
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4cd83ea2

Branch: refs/heads/jira/solr-8668
Commit: 4cd83ea276a72a4e6a3a46a3dd49a84bffb07fe7
Parents: 3316f47
Author: Mike McCandless <mi...@apache.org>
Authored: Sat Apr 22 08:49:34 2017 -0400
Committer: Mike McCandless <mi...@apache.org>
Committed: Sat Apr 22 08:49:34 2017 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                                     |  3 +++
 .../src/java/org/apache/lucene/store/FSDirectory.java  |  2 +-
 .../test/org/apache/lucene/store/TestDirectory.java    | 13 +++++++++++++
 3 files changed, 17 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4cd83ea2/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 3cee960..aeeebd6 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -97,6 +97,9 @@ Bug Fixes
   ArrayIndexOutOfBoundsException when byte blocks larger than 32 KB
   were added (Mike McCandless)
 
+* LUCENE-7797: The static FSDirectory.listAll(Path) method was always
+  returning an empty array.  (Atkins Chang via Mike McCandless)
+
 Improvements
 
 * LUCENE-7782: OfflineSorter now passes the total number of items it

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4cd83ea2/lucene/core/src/java/org/apache/lucene/store/FSDirectory.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/store/FSDirectory.java b/lucene/core/src/java/org/apache/lucene/store/FSDirectory.java
index 50b52ce..158a2e2 100644
--- a/lucene/core/src/java/org/apache/lucene/store/FSDirectory.java
+++ b/lucene/core/src/java/org/apache/lucene/store/FSDirectory.java
@@ -215,7 +215,7 @@ public abstract class FSDirectory extends BaseDirectory {
     try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
       for (Path path : stream) {
         String name = path.getFileName().toString();
-        if (skipNames != null && skipNames.contains(name) == false) {
+        if (skipNames == null || skipNames.contains(name) == false) {
           entries.add(name);
         }
       }
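
The bug is in the boolean logic: with the old skipNames != null && skipNames.contains(name) == false, a null skip set made the whole condition false, so the code path with nothing to skip (which the static FSDirectory.listAll(Path) relies on) never added a single entry, hence the always-empty array. The fix keeps a name when there is no skip set at all, or when the skip set does not mention it. A standalone sketch of the corrected predicate:

    import java.util.Set;

    // Sketch of the corrected filter from the hunk above: keep a file name when
    // no skip set was supplied, or when the skip set does not contain it.
    final class ListAllFilter {
      static boolean keep(String name, Set<String> skipNames) {
        return skipNames == null || skipNames.contains(name) == false;
      }
    }

The testListAll addition below pins this down by asserting that two freshly created files are both reported.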

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4cd83ea2/lucene/core/src/test/org/apache/lucene/store/TestDirectory.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/store/TestDirectory.java b/lucene/core/src/test/org/apache/lucene/store/TestDirectory.java
index 5e4a593..d3489ad 100644
--- a/lucene/core/src/test/org/apache/lucene/store/TestDirectory.java
+++ b/lucene/core/src/test/org/apache/lucene/store/TestDirectory.java
@@ -22,7 +22,9 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.lucene.util.LuceneTestCase;
 
@@ -137,5 +139,16 @@ public class TestDirectory extends LuceneTestCase {
       fsDir.close();
     }
   }
+
+  public void testListAll() throws Throwable {
+    Path dir = createTempDir("testdir");
+    Path file1 = Files.createFile(dir.resolve("tempfile1"));
+    Path file2 = Files.createFile(dir.resolve("tempfile2"));
+    Set<String> files = new HashSet<>(Arrays.asList(FSDirectory.listAll(dir)));
+
+    assertTrue(files.size() == 2);
+    assertTrue(files.contains(file1.getFileName().toString()));
+    assertTrue(files.contains(file2.getFileName().toString()));
+  }
 }
 


[07/17] lucene-solr:jira/solr-8668: SOLR-9217: delay JoinUtil call to createWeight for score join

Posted by cp...@apache.org.
SOLR-9217: delay JoinUtil call to createWeight for score join


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/a07ac633
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/a07ac633
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/a07ac633

Branch: refs/heads/jira/solr-8668
Commit: a07ac63357c3ecd817e85a5f392a558709998d05
Parents: 99ce234
Author: Mikhail Khludnev <mk...@apache.org>
Authored: Sun Apr 23 00:28:20 2017 +0300
Committer: Mikhail Khludnev <mk...@apache.org>
Committed: Sun Apr 23 01:30:23 2017 +0300

----------------------------------------------------------------------
 solr/CHANGES.txt                                         |  3 +++
 .../apache/solr/search/join/ScoreJoinQParserPlugin.java  | 11 ++++++-----
 .../apache/solr/search/join/TestScoreJoinQPNoScore.java  |  7 +++----
 3 files changed, 12 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a07ac633/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index c1bede4..5ba38d9 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -187,6 +187,9 @@ Optimizations
 * SOLR-10499: facet.heatmap is now significantly faster when the docset (base query) matches everything and there are no
   deleted docs.  It's also faster when the docset matches a small fraction of the index or none. (David Smiley)
 
+* SOLR-9217: Reduced heap consumption for filter({!join ... score=...}) 
+  (Andrey Kudryavtsev, Gopikannan Venugopalsamy via Mikhail Khludnev)
+
 Bug Fixes
 ----------------------
 * SOLR-10281: ADMIN_PATHS is duplicated in two places and inconsistent. This can cause automatic

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a07ac633/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
index edccc88..a49195c 100644
--- a/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java
@@ -21,8 +21,9 @@ import java.util.Map;
 import java.util.Objects;
 
 import org.apache.lucene.index.DocValuesType;
-import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.join.JoinUtil;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.solr.cloud.ZkController;
@@ -86,7 +87,7 @@ public class ScoreJoinQParserPlugin extends QParserPlugin {
     }
 
     @Override
-    public Query rewrite(IndexReader reader) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
 
       CoreContainer container = info.getReq().getCore().getCoreContainer();
@@ -106,7 +107,7 @@ public class ScoreJoinQParserPlugin extends QParserPlugin {
         fromCore.close();
         fromHolder.decref();
       }
-      return joinQuery.rewrite(reader);
+      return joinQuery.rewrite(searcher.getIndexReader()).createWeight(searcher, needsScores, boost);
     }
 
     @Override
@@ -156,11 +157,11 @@ public class ScoreJoinQParserPlugin extends QParserPlugin {
     }
 
     @Override
-    public Query rewrite(IndexReader reader) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
       final Query jq = JoinUtil.createJoinQuery(fromField, true,
           toField, fromQuery, info.getReq().getSearcher(), scoreMode);
-      return jq.rewrite(reader);
+      return jq.rewrite(searcher.getIndexReader()).createWeight(searcher, needsScores, boost);
     }
 
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/a07ac633/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java
index 3b23be8..0d9801e 100644
--- a/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java
+++ b/solr/core/src/test/org/apache/solr/search/join/TestScoreJoinQPNoScore.java
@@ -170,10 +170,9 @@ public class TestScoreJoinQPNoScore extends SolrTestCaseJ4 {
       {
         final Query query = QParser.getParser(req.getParams().get("q"), req).getQuery();
         final Query rewrittenQuery = query.rewrite(req.getSearcher().getIndexReader());
-        assertTrue(
-            rewrittenQuery+" should be Lucene's",
-            rewrittenQuery.getClass().getPackage().getName()
-            .startsWith("org.apache.lucene"));
+        assertEquals(rewrittenQuery+" is expected to be from Solr",
+            ScoreJoinQParserPlugin.class.getPackage().getName(), 
+            rewrittenQuery.getClass().getPackage().getName());
       }
       {
         final Query query = QParser.getParser(


[06/17] lucene-solr:jira/solr-8668: SOLR-10551: Improve tests

Posted by cp...@apache.org.
SOLR-10551: Improve tests


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/99ce2341
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/99ce2341
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/99ce2341

Branch: refs/heads/jira/solr-8668
Commit: 99ce2341628ea59f4821e3314e55758a2a406512
Parents: f6af8d0
Author: Joel Bernstein <jb...@apache.org>
Authored: Sat Apr 22 17:26:19 2017 -0400
Committer: Joel Bernstein <jb...@apache.org>
Committed: Sat Apr 22 17:26:19 2017 -0400

----------------------------------------------------------------------
 .../solr/client/solrj/io/stream/CellStream.java |  1 +
 .../solrj/io/stream/StreamExpressionTest.java   | 24 ++++++++++++++------
 2 files changed, 18 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99ce2341/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java
index 1696a1a..aad99f6 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CellStream.java
@@ -69,6 +69,7 @@ public class CellStream extends TupleStream implements Expressible {
   private StreamExpression toExpression(StreamFactory factory, boolean includeStreams) throws IOException {
     // function name
     StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
+    expression.addParameter(name);
     if(includeStreams) {
       expression.addParameter(((Expressible)stream).toExpression(factory));
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/99ce2341/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index 2505639..bb771b6 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -5041,10 +5041,16 @@ public class StreamExpressionTest extends SolrCloudTestCase {
   public void testListStream() throws Exception {
     UpdateRequest updateRequest = new UpdateRequest();
     updateRequest.add(id, "hello", "test_t", "l b c d c");
+    updateRequest.add(id, "hello1", "test_t", "l b c d c");
+    updateRequest.add(id, "hello2", "test_t", "l b c d c");
+
     updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    String expr = "search("+COLLECTIONORALIAS+", q=\"*:*\", fl=id, sort=\"id desc\")";
-    String cat = "list("+expr+","+expr+")";
+    String expr1 = "search("+COLLECTIONORALIAS+", q=\"id:hello\",  fl=id, sort=\"id desc\")";
+    String expr2 = "search("+COLLECTIONORALIAS+", q=\"id:hello1\", fl=id, sort=\"id desc\")";
+    String expr3 = "search("+COLLECTIONORALIAS+", q=\"id:hello2\", fl=id, sort=\"id desc\")";
+
+    String cat = "list("+expr1+","+expr2+","+expr3+")";
     ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
     paramsLoc.set("expr", cat);
     paramsLoc.set("qt", "/stream");
@@ -5055,23 +5061,24 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     StreamContext context = new StreamContext();
     solrStream.setStreamContext(context);
     List<Tuple> tuples = getTuples(solrStream);
-    assertTrue(tuples.size() == 2);
+    assertTrue(tuples.size() == 3);
     String s = (String)tuples.get(0).get("id");
     assertTrue(s.equals("hello"));
     s = (String)tuples.get(1).get("id");
-    assertTrue(s.equals("hello"));
-
+    assertTrue(s.equals("hello1"));
+    s = (String)tuples.get(2).get("id");
+    assertTrue(s.equals("hello2"));
   }
 
   @Test
   public void testCellStream() throws Exception {
     UpdateRequest updateRequest = new UpdateRequest();
-    updateRequest.add(id, "hello", "test_t", "l b c d c");
+    updateRequest.add(id, "hello", "test_t", "l b c d c e");
     updateRequest.add(id, "hello1", "test_t", "l b c d c");
 
     updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS);
 
-    String expr = "search("+COLLECTIONORALIAS+", q=\"*:*\", fl=id, sort=\"id desc\")";
+    String expr = "search("+COLLECTIONORALIAS+", q=\"*:*\", fl=\"id,test_t\", sort=\"id desc\")";
     String cat = "cell(results,"+expr+")";
     ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
     paramsLoc.set("expr", cat);
@@ -5086,7 +5093,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(tuples.size() == 1);
     List<Map> results  = (List<Map>)tuples.get(0).get("results");
     assertTrue(results.get(0).get("id").equals("hello1"));
+    assertTrue(results.get(0).get("test_t").equals("l b c d c"));
     assertTrue(results.get(1).get("id").equals("hello"));
+    assertTrue(results.get(1).get("test_t").equals("l b c d c e"));
+
   }
 
   @Test