Posted to commits@lucene.apache.org by ab...@apache.org on 2017/07/03 09:36:47 UTC

[07/59] [abbrv] lucene-solr:jira/solr-10878: SOLR-10123: Upgraded the Analytics Component to version 2.0

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/LongDataArrayWriter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/LongDataArrayWriter.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/LongDataArrayWriter.java
new file mode 100644
index 0000000..12fc86e
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/LongDataArrayWriter.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.stream.reservation.write;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.function.IntSupplier;
+import java.util.function.LongSupplier;
+
+public class LongDataArrayWriter extends ReductionDataArrayWriter<LongSupplier> {
+
+  public LongDataArrayWriter(DataOutput output, LongSupplier extractor, IntSupplier sizeSupplier) {
+    super(output, extractor, sizeSupplier);
+  }
+  
+  @Override
+  public void write(int size) throws IOException {
+    for (int i = 0; i < size; ++i) {
+      output.writeLong(extractor.getAsLong());
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/LongDataWriter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/LongDataWriter.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/LongDataWriter.java
new file mode 100644
index 0000000..3b8af52
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/LongDataWriter.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.stream.reservation.write;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.function.LongSupplier;
+
+public class LongDataWriter extends ReductionDataWriter<LongSupplier> {
+  
+  public LongDataWriter(DataOutput output, LongSupplier extractor) {
+    super(output, extractor);
+  }
+
+  @Override
+  public void write() throws IOException {
+    output.writeLong(extractor.getAsLong());
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionCheckedDataWriter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionCheckedDataWriter.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionCheckedDataWriter.java
new file mode 100644
index 0000000..a5a2273
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionCheckedDataWriter.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.stream.reservation.write;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.function.BooleanSupplier;
+
+/**
+ * Abstract class to manage the extraction and writing of data to a {@link DataOutput} stream.
+ * The data being written may not exist, so the writer first writes whether the data exists before writing the data.
+ */
+public abstract class ReductionCheckedDataWriter<C> extends ReductionDataWriter<C> {
+  private final BooleanSupplier existsSupplier;
+  
+  public ReductionCheckedDataWriter(DataOutput output, C extractor, BooleanSupplier existsSupplier) {
+    super(output, extractor);
+    
+    this.existsSupplier = existsSupplier;
+  }
+  
+  /**
+   * Write a piece of data, retrieved from the extractor, to the output stream.
+   * <br>
+   * First writes whether the data exists, then, if it does exist, writes the data.
+   * 
+   * @throws IOException if an exception occurs while writing to the output stream
+   */
+  @Override
+  public void write() throws IOException {
+    boolean exists = existsSupplier.getAsBoolean();
+    output.writeBoolean(exists);
+    if (exists) {
+      checkedWrite();
+    }
+  }
+  
+  /**
+   * Write a piece of data, retrieved from the extractor, to the output stream.
+   * <br>
+   * The data being written is guaranteed to exist.
+   * 
+   * @throws IOException if an exception occurs while writing to the output stream
+   */
+  protected abstract void checkedWrite() throws IOException;
+}
\ No newline at end of file
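
The exists-flag protocol above implies a matching read side. The following sketch is not part of this commit; it only illustrates, with a hypothetical readCheckedString helper, how a consumer mirrors write(): read the boolean flag first, and read the payload only when the flag is true.

    // Illustrative only -- the corresponding reader classes are not shown in this diff.
    // Mirrors ReductionCheckedDataWriter.write(): exists flag first, then the data.
    String readCheckedString(java.io.DataInput input) throws java.io.IOException {
      if (input.readBoolean()) {    // was the value present on the writer side?
        return input.readUTF();     // yes: the payload follows immediately
      }
      return null;                  // no: nothing else was written for this value
    }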

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionDataArrayWriter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionDataArrayWriter.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionDataArrayWriter.java
new file mode 100644
index 0000000..29ba77e
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionDataArrayWriter.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.stream.reservation.write;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.function.IntSupplier;
+
+/**
+ * Abstract class to manage the extraction and writing of array data to a {@link DataOutput} stream.
+ */
+public abstract class ReductionDataArrayWriter<C> extends ReductionDataWriter<C> {
+  private final IntSupplier sizeSupplier;
+  
+  public ReductionDataArrayWriter(DataOutput output, C extractor, IntSupplier sizeSupplier) {
+    super(output, extractor);
+    
+    this.sizeSupplier = sizeSupplier;
+  }
+  
+  /**
+   * Write an array of data, retrieved from the extractor, and its size, obtained from the sizeSupplier, to the output stream.
+   * 
+   * @throws IOException if an exception occurs while writing to the output stream
+   */
+  @Override
+  public void write() throws IOException {
+    int size = sizeSupplier.getAsInt();
+    output.writeInt(size);
+    write(size);
+  }
+  
+  /**
+   * Write an array of data, retrieved from the extractor, with the given size to the output stream.
+   * 
+   * @throws IOException if an exception occurs while writing to the output stream
+   */
+  protected abstract void write(int size) throws IOException;
+}
\ No newline at end of file
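
A rough sketch (not part of this commit) of how the size-prefixed layout produced by write() would be consumed, assuming the elements were written by LongDataArrayWriter:

    // Illustrative only: read back what LongDataArrayWriter.write(int) produced.
    long[] readLongArray(java.io.DataInput input) throws java.io.IOException {
      int size = input.readInt();        // the size is always written first
      long[] values = new long[size];
      for (int i = 0; i < size; ++i) {
        values[i] = input.readLong();    // then exactly 'size' longs follow
      }
      return values;
    }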

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionDataWriter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionDataWriter.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionDataWriter.java
new file mode 100644
index 0000000..504a2be
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/ReductionDataWriter.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.stream.reservation.write;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+/**
+ * Abstract class to manage the extraction and writing of data to a {@link DataOutput} stream.
+ */
+public abstract class ReductionDataWriter<E> {
+  protected final DataOutput output;
+  protected final E extractor;
+  
+  public ReductionDataWriter(DataOutput output, E extractor) {
+    this.output = output;
+    this.extractor = extractor;
+  }
+
+  /**
+   * Write a piece of data, retrieved from the extractor, to the output stream.
+   * 
+   * @throws IOException if an exception occurs while writing to the output stream
+   */
+  public abstract void write() throws IOException;
+}
\ No newline at end of file
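
A minimal usage sketch (assumptions: a plain ByteArrayOutputStream sink and a local counter standing in for a real reduction data collector, neither of which appears in this commit):

    // Illustrative only: wire a concrete writer to a DataOutput and a value supplier.
    void exampleWrite() throws java.io.IOException {
      java.io.ByteArrayOutputStream bytes = new java.io.ByteArrayOutputStream();
      java.io.DataOutputStream output = new java.io.DataOutputStream(bytes);

      long[] runningSum = { 42L };                     // stand-in for collected reduction data
      LongDataWriter writer = new LongDataWriter(output, () -> runningSum[0]);

      writer.write();                                  // serializes the current value
      runningSum[0] += 8;
      writer.write();                                  // serializes the updated value
    }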

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringCheckedDataWriter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringCheckedDataWriter.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringCheckedDataWriter.java
new file mode 100644
index 0000000..6560a8f
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringCheckedDataWriter.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.stream.reservation.write;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.function.BooleanSupplier;
+import java.util.function.Supplier;
+
+public class StringCheckedDataWriter extends ReductionCheckedDataWriter<Supplier<String>> {
+  
+  public StringCheckedDataWriter(DataOutput output, Supplier<String> extractor, BooleanSupplier existsSupplier) {
+    super(output, extractor, existsSupplier);
+  }
+
+  @Override
+  public void checkedWrite() throws IOException {
+    output.writeUTF(extractor.get());
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringDataArrayWriter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringDataArrayWriter.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringDataArrayWriter.java
new file mode 100644
index 0000000..18c71d1
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringDataArrayWriter.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.stream.reservation.write;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.function.IntSupplier;
+import java.util.function.Supplier;
+
+public class StringDataArrayWriter extends ReductionDataArrayWriter<Supplier<String>> {
+
+  public StringDataArrayWriter(DataOutput output, Supplier<String> extractor, IntSupplier sizeSupplier) {
+    super(output, extractor, sizeSupplier);
+  }
+  
+  @Override
+  public void write(int size) throws IOException {
+    for (int i = 0; i < size; ++i) {
+      output.writeUTF(extractor.get());
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringDataWriter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringDataWriter.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringDataWriter.java
new file mode 100644
index 0000000..4aac07c
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/StringDataWriter.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.stream.reservation.write;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.function.Supplier;
+
+public class StringDataWriter extends ReductionDataWriter<Supplier<String>> {
+  
+  public StringDataWriter(DataOutput output, Supplier<String> extractor) {
+    super(output, extractor);
+  }
+
+  @Override
+  public void write() throws IOException {
+    String temp = extractor.get();
+    output.writeBoolean(temp != null);
+    if (temp != null) {
+      output.writeUTF(temp);
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/package-info.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/package-info.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/package-info.java
new file mode 100644
index 0000000..53a5168
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/stream/reservation/write/package-info.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+/** 
+ * Writing classes for a single type of data being stored by one Reduction Data Collector.
+ * These writers are used to export data between shards during the streaming process.
+ */
+package org.apache.solr.analytics.stream.reservation.write;
+
+

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParams.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParams.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParams.java
deleted file mode 100644
index f6716ff..0000000
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParams.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.analytics.util;
-
-import java.util.Collections;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.solr.common.params.FacetParams.FacetRangeInclude;
-import org.apache.solr.common.params.FacetParams.FacetRangeOther;
-import org.apache.solr.search.function.ConcatStringFunction;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-
-public interface AnalyticsParams {
-  // Full length Analytics Params
-  public static final String ANALYTICS = "olap";
-  
-  public static final String REQUEST = "o|olap";
-
-  public static final String EXPRESSION = "s|stat|statistic";
-  public static final String HIDDEN_EXPRESSION = "hs|hiddenstat|hiddenstatistic";
-
-  public static final String FIELD_FACET = "ff|fieldfacet";
-  public static final String LIMIT = "l|limit";
-  public static final String OFFSET = "off|offset";
-  public static final String HIDDEN = "h|hidden";
-  public static final String SHOW_MISSING = "sm|showmissing";
-  public static final String SORT_STATISTIC ="ss|sortstat|sortstatistic";
-  public static final String SORT_DIRECTION ="sd|sortdirection";
-  
-  public static final String RANGE_FACET = "rf|rangefacet";
-  public static final String START = "st|start";
-  public static final String END = "e|end";
-  public static final String GAP = "g|gap";
-  public static final String HARDEND = "he|hardend";
-  public static final String INCLUDE_BOUNDARY = "ib|includebound";
-  public static final String OTHER_RANGE = "or|otherrange";
-  
-  public static final String QUERY_FACET = "qf|queryfacet";
-  public static final String DEPENDENCY = "d|dependecy";
-  public static final String QUERY = "q|query";
-  
-  //Defaults
-  public static final boolean DEFAULT_ABBREVIATE_PREFIX = true;
-  public static final String DEFAULT_SORT_DIRECTION = "ascending";
-  public static final int DEFAULT_LIMIT = -1;
-  public static final boolean DEFAULT_HIDDEN = false;
-  public static final boolean DEFAULT_HARDEND = false;
-  public static final boolean DEFAULT_SHOW_MISSING = false;
-  public static final FacetRangeInclude DEFAULT_INCLUDE = FacetRangeInclude.LOWER;
-  public static final FacetRangeOther DEFAULT_OTHER = FacetRangeOther.NONE;
-  
-  // Statistic Function Names (Cannot share names with ValueSource & Expression Functions)
-  public static final String STAT_COUNT = "count";
-  public static final String STAT_MISSING = "missing";
-  public static final String STAT_SUM = "sum";
-  public static final String STAT_SUM_OF_SQUARES = "sumofsquares";
-  public static final String STAT_STANDARD_DEVIATION = "stddev";
-  public static final String STAT_MEAN = "mean";
-  public static final String STAT_UNIQUE = "unique";
-  public static final String STAT_MEDIAN = "median";
-  public static final String STAT_PERCENTILE = "percentile";
-  public static final String STAT_MIN = "min";
-  public static final String STAT_MAX = "max";
-  
-  public static final List<String> ALL_STAT_LIST = Collections.unmodifiableList(Lists.newArrayList(STAT_COUNT, STAT_MISSING, STAT_SUM, STAT_SUM_OF_SQUARES, STAT_STANDARD_DEVIATION, STAT_MEAN, STAT_UNIQUE, STAT_MEDIAN, STAT_PERCENTILE,STAT_MIN,STAT_MAX));
-  public static final Set<String> ALL_STAT_SET = Collections.unmodifiableSet(Sets.newLinkedHashSet(ALL_STAT_LIST));
-
-  // ValueSource & Expression Function Names (Cannot share names with Statistic Functions)
-  // No specific type
-  final static String FILTER = "filter";
-  final static String RESULT = "result";
-  final static String QUERY_RESULT = "qresult";
-  
-  // Numbers
-  final static String CONSTANT_NUMBER = "const_num";
-  final static String NEGATE = "neg";
-  final static String ABSOLUTE_VALUE = "abs";
-  final static String LOG = "log";
-  final static String ADD = "add";
-  final static String MULTIPLY = "mult";
-  final static String DIVIDE = "div";
-  final static String POWER = "pow";
-  public static final Set<String> NUMERIC_OPERATION_SET = Collections.unmodifiableSet(Sets.newLinkedHashSet(Lists.newArrayList(CONSTANT_NUMBER,NEGATE,ABSOLUTE_VALUE,LOG,ADD,MULTIPLY,DIVIDE,POWER)));
-  
-  // Dates
-  final static String CONSTANT_DATE = "const_date";
-  final static String DATE_MATH = "date_math";
-  public static final Set<String> DATE_OPERATION_SET = Collections.unmodifiableSet(Sets.newLinkedHashSet(Lists.newArrayList(CONSTANT_DATE,DATE_MATH)));
-  
-  //Strings
-  final static String CONSTANT_STRING = "const_str";
-  final static String REVERSE = "rev";
-  final static String CONCATENATE = ConcatStringFunction.NAME;
-  public static final Set<String> STRING_OPERATION_SET = Collections.unmodifiableSet(Sets.newLinkedHashSet(Lists.newArrayList(CONSTANT_STRING,REVERSE,CONCATENATE)));
-  
-  // Field Source Wrappers
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParsers.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParsers.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParsers.java
deleted file mode 100644
index dd64c3f..0000000
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsParsers.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.analytics.util;
-
-import java.io.IOException;
-import java.time.Instant;
-import java.util.Arrays;
-
-import org.apache.solr.legacy.LegacyNumericUtils;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.NumericUtils;
-import org.apache.solr.schema.FieldType;
-import org.apache.solr.schema.TrieDateField;
-import org.apache.solr.schema.TrieDoubleField;
-import org.apache.solr.schema.TrieFloatField;
-import org.apache.solr.schema.TrieIntField;
-import org.apache.solr.schema.TrieLongField;
-
-/** 
- * Class to hold the parsers used for Solr Analytics.
- */
-public class AnalyticsParsers {
-
-  /**
-   * Returns a parser that will translate a BytesRef or long from DocValues into 
-   * a String that correctly represents the value.
-   * @param class1 class of the FieldType of the field being faceted on.
-   * @return A Parser
-   */
-  public static Parser getParser(Class<? extends FieldType> class1) {
-    if (class1.equals(TrieIntField.class)) {
-      return AnalyticsParsers.INT_DOC_VALUES_PARSER;
-    } else if (class1.equals(TrieLongField.class)) {
-      return AnalyticsParsers.LONG_DOC_VALUES_PARSER;
-    } else if (class1.equals(TrieFloatField.class)) {
-      return AnalyticsParsers.FLOAT_DOC_VALUES_PARSER;
-    } else if (class1.equals(TrieDoubleField.class)) {
-      return AnalyticsParsers.DOUBLE_DOC_VALUES_PARSER;
-    } else if (class1.equals(TrieDateField.class)) {
-      return AnalyticsParsers.DATE_DOC_VALUES_PARSER;
-    } else {
-      return AnalyticsParsers.STRING_PARSER;
-    }
-  }
-
-  /**
-   * For use in classes that grab values by docValue.
-   * Converts a BytesRef object into the correct readable text.
-   */
-  public static interface Parser {
-    String parse(BytesRef bytes) throws IOException;
-  }
-  
-  /**
-   * Converts the long returned by NumericDocValues into the
-   * correct number and return it as a string.
-   */
-  public static interface NumericParser extends Parser {
-    String parseNum(long l);
-  }
-  
-  /**
-   * Converts the BytesRef or long to the correct int string.
-   */
-  public static final NumericParser INT_DOC_VALUES_PARSER = new NumericParser() {
-    public String parse(BytesRef bytes) throws IOException {
-      try {
-        return ""+ LegacyNumericUtils.prefixCodedToInt(bytes);
-      } catch (NumberFormatException e) {
-        throw new IOException("The byte array "+Arrays.toString(bytes.bytes)+" cannot be converted to an int.");
-      }
-    }
-    @Override
-    public String parseNum(long l) {
-      return ""+(int)l;
-    }
-  };
-  
-  /**
-   * Converts the BytesRef or long to the correct long string.
-   */
-  public static final NumericParser LONG_DOC_VALUES_PARSER = new NumericParser() {
-    public String parse(BytesRef bytes) throws IOException {
-      try {
-        return ""+ LegacyNumericUtils.prefixCodedToLong(bytes);
-      } catch (NumberFormatException e) {
-        throw new IOException("The byte array "+Arrays.toString(bytes.bytes)+" cannot be converted to a long.");
-      }
-    }
-    @Override
-    public String parseNum(long l) {
-      return ""+l;
-    }
-  };
-  
-  /**
-   * Converts the BytesRef or long to the correct float string.
-   */
-  public static final NumericParser FLOAT_DOC_VALUES_PARSER = new NumericParser() {
-    public String parse(BytesRef bytes) throws IOException {
-      try {
-        return ""+ NumericUtils.sortableIntToFloat(LegacyNumericUtils.prefixCodedToInt(bytes));
-      } catch (NumberFormatException e) {
-        throw new IOException("The byte array "+Arrays.toString(bytes.bytes)+" cannot be converted to a float.");
-      }
-    }
-    @Override
-    public String parseNum(long l) {
-      return ""+ NumericUtils.sortableIntToFloat((int) l);
-    }
-  };
-  
-  /**
-   * Converts the BytesRef or long to the correct double string.
-   */
-  public static final NumericParser DOUBLE_DOC_VALUES_PARSER = new NumericParser() {
-    public String parse(BytesRef bytes) throws IOException {
-      try {
-        return ""+ NumericUtils.sortableLongToDouble(LegacyNumericUtils.prefixCodedToLong(bytes));
-      } catch (NumberFormatException e) {
-        throw new IOException("The byte array "+Arrays.toString(bytes.bytes)+" cannot be converted to a double.");
-      }
-    }
-    @Override
-    public String parseNum(long l) {
-      return ""+ NumericUtils.sortableLongToDouble(l);
-    }
-  };
-  
-  /**
-   * Converts the BytesRef or long to the correct date string.
-   */
-  public static final NumericParser DATE_DOC_VALUES_PARSER = new NumericParser() {
-    @SuppressWarnings("deprecation")
-    public String parse(BytesRef bytes) throws IOException {
-      try {
-        return Instant.ofEpochMilli(LegacyNumericUtils.prefixCodedToLong(bytes)).toString();
-      } catch (NumberFormatException e) {
-        throw new IOException("The byte array "+Arrays.toString(bytes.bytes)+" cannot be converted to a date.");
-      }
-    }
-    @SuppressWarnings("deprecation")
-    @Override
-    public String parseNum(long l) {
-      return Instant.ofEpochMilli(l).toString();
-    }
-  };
-  
-  /**
-   * Converts the BytesRef to the correct string.
-   */
-  public static final Parser STRING_PARSER = new Parser() {
-    public String parse(BytesRef bytes) {
-      return bytes.utf8ToString();
-    }
-  };
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsResponseHeadings.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsResponseHeadings.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsResponseHeadings.java
new file mode 100644
index 0000000..00e0afb
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/AnalyticsResponseHeadings.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.util;
+
+/**
+ * Holds the headers for analytics responses.
+ */
+public class AnalyticsResponseHeadings {
+
+  public static final String COMPLETED_HEADER = "analytics_response";
+  public static final String RESULTS = "results";
+  public static final String GROUPINGS = "groupings";
+  public static final String FACET_VALUE = "value";
+  public static final String PIVOT_NAME = "pivot";
+  public static final String PIVOT_CHILDREN = "children";
+  
+  // Old Olap-style
+  public static final String COMPLETED_OLD_HEADER = "stats";
+  public static final String FIELD_FACETS = "fieldFacets";
+  public static final String RANGE_FACETS = "rangeFacets";
+  public static final String QUERY_FACETS = "queryFacets";
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/FacetRangeGenerator.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/FacetRangeGenerator.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/FacetRangeGenerator.java
new file mode 100644
index 0000000..0576096
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/FacetRangeGenerator.java
@@ -0,0 +1,356 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.util;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.solr.analytics.facet.RangeFacet;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.FacetParams.FacetRangeInclude;
+import org.apache.solr.common.params.FacetParams.FacetRangeOther;
+import org.apache.solr.schema.FieldType;
+import org.apache.solr.schema.SchemaField;
+import org.apache.solr.schema.TrieDateField;
+import org.apache.solr.schema.TrieField;
+import org.apache.solr.util.DateMathParser;
+
+
+/**
+ * Calculates a set of {@link FacetRange}s for a given {@link RangeFacet}.
+ */
+public abstract class FacetRangeGenerator<T extends Comparable<T>> {
+  protected final SchemaField field;
+  protected final RangeFacet rangeFacet;
+  
+  public FacetRangeGenerator(final RangeFacet rangeFacet) {
+    this.field = rangeFacet.getField();
+    this.rangeFacet = rangeFacet;
+  }
+
+  /**
+   * Formats a Range endpoint for use as a range label name in the response.
+   * Default Impl just uses toString()
+   */
+  public String formatValue(final T val) {
+    return val.toString();
+  }
+  
+  /**
+   * Parses a String param into a Range endpoint value, throwing 
+   * a useful exception if not possible.
+   */
+  public final T getValue(final String rawval) {
+    try {
+      return parseVal(rawval);
+    } catch (Exception e) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Can't parse value "+rawval+" for field: " + field.getName(), e);
+    }
+  }
+  
+  /**
+   * Parses a String param into a Range endpoint. 
+   * Can throw a low level format exception as needed.
+   */
+  protected abstract T parseVal(final String rawval) throws java.text.ParseException;
+
+  /** 
+   * Parses a String param into a value that represents the gap and 
+   * can be included in the response, throwing 
+   * a useful exception if not possible.
+   *
+   * Note: uses Object as the return type instead of T for things like 
+   * Date where gap is just a DateMathParser string 
+   */
+  public final Object getGap(final String gap) {
+    try {
+      return parseGap(gap);
+    } catch (Exception e) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Can't parse gap "+gap+" for field: " + field.getName(), e);
+    }
+  }
+
+  /**
+   * Parses a String param into a value that represents the gap and 
+   * can be included in the response. 
+   * Can throw a low level format exception as needed.
+   *
+   * Default Impl calls parseVal
+   */
+  protected Object parseGap(final String rawval) throws java.text.ParseException {
+    return parseVal(rawval);
+  }
+
+  /**
+   * Adds the String gap param to a low Range endpoint value to determine 
+   * the corresponding high Range endpoint value, throwing 
+   * a useful exception if not possible.
+   */
+  public final T addGap(T value, String gap) {
+    try {
+      return parseAndAddGap(value, gap);
+    } catch (Exception e) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Can't add gap "+gap+" to value " + value + " for field: " + field.getName(), e);
+    }
+  }
+  
+  /**
+   * Adds the String gap param to a low Range endpoint value to determine 
+   * the corresponding high Range endpoint value.
+   * Can throw a low level format exception as needed.
+   */
+  protected abstract T parseAndAddGap(T value, String gap) throws java.text.ParseException;
+
+  public static class FacetRange {
+    public final String name;
+    public final String lower;
+    public final String upper;
+    public final boolean includeLower;
+    public final boolean includeUpper;
+    private final String facetValue;
+    
+    public FacetRange(String name, String lower, String upper, boolean includeLower, boolean includeUpper) {
+      this.name = name;
+      this.lower = lower;
+      this.upper = upper;
+      this.includeLower = includeLower;
+      this.includeUpper = includeUpper;
+      
+      String value = "(*";
+      if (lower != null) {
+        value = (includeLower ? "[" : "(") + lower;
+      }
+      value += " TO ";
+      if (upper == null) {
+        value += "*)";
+      } else {
+        value += upper + (includeUpper? "]" : ")");
+      }
+      facetValue = value;
+    }
+    
+    @Override
+    public String toString() {
+        return facetValue;
+    }
+  }
+  
+  public List<FacetRange> getRanges(){
+
+    final T start = getValue(rangeFacet.getStart());
+    T end = getValue(rangeFacet.getEnd()); // not final, hardend may change this
+    
+    if( end.compareTo(start) < 0 ){
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "range facet 'end' comes before 'start': "+end+" < "+start);
+    }
+    
+    // explicitly return the gap.  compute this early so we are more 
+    // likely to catch parse errors before attempting math
+    final List<String> gaps = rangeFacet.getGaps();
+    String gap = gaps.get(0);
+    
+    final EnumSet<FacetRangeInclude> include = rangeFacet.getInclude();
+        
+    T low = start;
+    
+    List<FacetRange> ranges = new ArrayList<>();
+    
+    int gapCounter = 0;
+    
+    while (low.compareTo(end) < 0) {
+      if (gapCounter<gaps.size()) {
+        gap = gaps.get(gapCounter++);
+      }
+      T high = addGap(low,gap);
+      if (end.compareTo(high) < 0) {
+        if (rangeFacet.isHardEnd()){
+          high = end;
+        } else {
+          end = high;
+        }
+      }
+      
+      if (high.compareTo(low) < 0) {
+        throw new SolrException (SolrException.ErrorCode.BAD_REQUEST, "range facet infinite loop (is gap negative? did the math overflow?)");
+      }
+      
+      if (high.compareTo(low) == 0) {
+        throw new SolrException (SolrException.ErrorCode.BAD_REQUEST, "range facet infinite loop: gap is either zero, or too small relative to start/end and caused underflow: " + low + " + " + gap + " = " + high );
+      }
+      
+      final boolean includeLower = (include.contains(FacetRangeInclude.ALL) ||
+                                    include.contains(FacetRangeInclude.LOWER) ||
+                                   (include.contains(FacetRangeInclude.EDGE) && 
+                                   0 == low.compareTo(start)));
+      final boolean includeUpper = (include.contains(FacetRangeInclude.ALL) ||
+                                    include.contains(FacetRangeInclude.UPPER) ||
+                                   (include.contains(FacetRangeInclude.EDGE) && 
+                                   0 == high.compareTo(end)));
+      
+      final String lowS = formatValue(low);
+      final String highS = formatValue(high);
+
+      ranges.add( new FacetRange(lowS,lowS,highS,includeLower,includeUpper) );
+      low = high;
+    }
+    
+    final Set<FacetRangeOther> others = rangeFacet.getOthers();
+    if (null != others && 0 < others.size() ) {
+      
+      // no matter what other values are listed, we don't do
+      // anything if "none" is specified.
+      if( !others.contains(FacetRangeOther.NONE) ) {
+        
+        boolean all = others.contains(FacetRangeOther.ALL);
+
+        if (all || others.contains(FacetRangeOther.BEFORE)) {
+          // include upper bound if "outer" or if first gap doesn't already include it
+          ranges.add( new FacetRange(FacetRangeOther.BEFORE.toString(), 
+                                        null, formatValue(start), false, include.contains(FacetRangeInclude.OUTER) || include.contains(FacetRangeInclude.ALL) ||
+                                                            !(include.contains(FacetRangeInclude.LOWER) || include.contains(FacetRangeInclude.EDGE)) ) );
+          
+        }
+        if (all || others.contains(FacetRangeOther.AFTER)) {
+          // include lower bound if "outer" or if last gap doesn't already include it
+          ranges.add( new FacetRange(FacetRangeOther.AFTER.toString(), 
+                                        formatValue(end), null, include.contains(FacetRangeInclude.OUTER) || include.contains(FacetRangeInclude.ALL) ||
+                                                   !(include.contains(FacetRangeInclude.UPPER) || include.contains(FacetRangeInclude.EDGE)), false) );
+        }
+        if (all || others.contains(FacetRangeOther.BETWEEN)) {
+          ranges.add( new FacetRange(FacetRangeOther.BETWEEN.toString(), formatValue(start), formatValue(end),
+                                        include.contains(FacetRangeInclude.LOWER) || include.contains(FacetRangeInclude.EDGE) || include.contains(FacetRangeInclude.ALL),
+                                        include.contains(FacetRangeInclude.UPPER) || include.contains(FacetRangeInclude.EDGE) || include.contains(FacetRangeInclude.ALL)) );
+        }
+      }
+      
+    }
+  
+    return ranges;
+  }
+  
+  public static FacetRangeGenerator<? extends Comparable<?>> create(RangeFacet rangeFacet){
+    final SchemaField sf = rangeFacet.getField();
+    final FieldType ft = sf.getType();
+    final FacetRangeGenerator<?> calc;
+    if (ft instanceof TrieField) {
+      switch (ft.getNumberType()) {
+        case FLOAT:
+          calc = new FloatFacetRangeGenerator(rangeFacet);
+          break;
+        case DOUBLE:
+          calc = new DoubleFacetRangeGenerator(rangeFacet);
+          break;
+        case INTEGER:
+          calc = new IntegerFacetRangeGenerator(rangeFacet);
+          break;
+        case LONG:
+          calc = new LongFacetRangeGenerator(rangeFacet);
+          break;
+        case DATE:
+          calc = new DateFacetRangeGenerator(rangeFacet, null);
+          break;
+        default:
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unable to range facet on trie field of unexpected type:" + sf.getName());
+      }
+    } else {
+      throw new SolrException (SolrException.ErrorCode.BAD_REQUEST, "Unable to range facet on field:" + sf);
+    } 
+    return calc;
+  }
+}
+class IntegerFacetRangeGenerator extends FacetRangeGenerator<Integer> {
+  public IntegerFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); }
+
+  @Override
+  protected Integer parseVal(String rawval) {
+    return Integer.valueOf(rawval);
+  }
+  @Override
+  public Integer parseAndAddGap(Integer value, String gap) {
+    return new Integer(value.intValue() + Integer.valueOf(gap).intValue());
+  }
+}
+class LongFacetRangeGenerator extends FacetRangeGenerator<Long> {
+  public LongFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); }
+
+  @Override
+  protected Long parseVal(String rawval) {
+    return Long.valueOf(rawval);
+  }
+  @Override
+  public Long parseAndAddGap(Long value, String gap) {
+    return new Long(value.longValue() + Long.valueOf(gap).longValue());
+  }
+}
+
+class FloatFacetRangeGenerator extends FacetRangeGenerator<Float> {
+  public FloatFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); }
+
+  @Override
+  protected Float parseVal(String rawval) {
+    return Float.valueOf(rawval);
+  }
+  @Override
+  public Float parseAndAddGap(Float value, String gap) {
+    return new Float(value.floatValue() + Float.valueOf(gap).floatValue());
+  }
+}
+
+class DoubleFacetRangeGenerator extends FacetRangeGenerator<Double> {
+  public DoubleFacetRangeGenerator(final RangeFacet rangeFacet) { super(rangeFacet); }
+
+  @Override
+  protected Double parseVal(String rawval) {
+    return Double.valueOf(rawval);
+  }
+  @Override
+  public Double parseAndAddGap(Double value, String gap) {
+    return new Double(value.doubleValue() + Double.valueOf(gap).doubleValue());
+  }
+}
+class DateFacetRangeGenerator extends FacetRangeGenerator<Date> {
+  private final Date now;
+  public DateFacetRangeGenerator(final RangeFacet rangeFacet, final Date now) { 
+    super(rangeFacet); 
+    this.now = now;
+    if (! (field.getType() instanceof TrieDateField) ) {
+      throw new IllegalArgumentException("SchemaField must use field type extending TrieDateField");
+    }
+  }
+  
+  @Override
+  public String formatValue(Date val) {
+    return val.toInstant().toString();
+  }
+  @Override
+  protected Date parseVal(String rawval) {
+    return DateMathParser.parseMath(now, rawval);
+  }
+  @Override
+  protected Object parseGap(final String rawval) {
+    return rawval;
+  }
+  @Override
+  public Date parseAndAddGap(Date value, String gap) throws java.text.ParseException {
+    final DateMathParser dmp = new DateMathParser();
+    dmp.setNow(value);
+    return dmp.parseMath(gap);
+  }
+}
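
As a quick illustration (not part of the commit), the value strings that FacetRange builds follow standard Solr range-query syntax, so an integer facet with start=0, end=10, gap=5 and lower-inclusive bounds would be expected to produce labels like these:

    // Illustrative only: query strings produced by FacetRange.toString().
    String first  = new FacetRangeGenerator.FacetRange("0", "0", "5",  true, false).toString();        // "[0 TO 5)"
    String second = new FacetRangeGenerator.FacetRange("5", "5", "10", true, false).toString();        // "[5 TO 10)"
    String before = new FacetRangeGenerator.FacetRange("before", null, "0",  false, false).toString(); // "(* TO 0)"
    String after  = new FacetRangeGenerator.FacetRange("after",  "10", null, false, false).toString(); // "(10 TO *)"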

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/MedianCalculator.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/MedianCalculator.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/MedianCalculator.java
index 52935e9..541cff0 100644
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/MedianCalculator.java
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/MedianCalculator.java
@@ -18,6 +18,10 @@ package org.apache.solr.analytics.util;
 
 import java.util.List;
 
+/**
+ * Only used for testing.
+ * Medians are calculated with the {@link OrdinalCalculator} for actual analytics requests.
+ */
 public class MedianCalculator {
 
   /**

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OldAnalyticsParams.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OldAnalyticsParams.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OldAnalyticsParams.java
new file mode 100644
index 0000000..084d997
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OldAnalyticsParams.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.util;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+import java.util.regex.Pattern;
+
+import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsExpressionSortRequest;
+import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsRangeFacetRequest;
+import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsSortRequest;
+import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsValueFacetRequest;
+
+/**
+ * Specifies the format of the old olap-style analytics requests.
+ */
+public interface OldAnalyticsParams {
+  // Old request language
+  public static final String OLD_ANALYTICS = "olap";
+
+  public static final String OLD_PREFIX = "o|olap";
+
+  public static final String OLD_EXPRESSION = "s|stat|statistic";
+  
+  public static class OldRequest {
+    public String name;
+    public Map<String,String> expressions = new HashMap<>();
+    public Map<String,OldFieldFacet> fieldFacets = new HashMap<>();
+    public Map<String,OldRangeFacet> rangeFacets = new HashMap<>();
+    public Map<String,OldQueryFacet> queryFacets = new HashMap<>();
+  }
+
+  public static final String FIELD_FACET = "ff|fieldfacet";
+  public static final String VALUE_FACET = "vf|valuefacet";
+  public static final String LIMIT = "l|limit";
+  public static final String OFFSET = "off|offset";
+  public static final String SHOW_MISSING = "sm|showmissing";
+  public static final String SORT_EXPRESSION ="se|sortexpr|sortexpression";
+  public static final String OLAP_SORT_EXPRESSION ="ss|sortstat|sortstatistic";
+  public static final String SORT_DIRECTION ="sd|sortdirection";
+  
+  public static class OldFieldFacet {
+    public String field;
+    public String showMissing;
+    public String limit;
+    public String offset;
+    public String sortExpr;
+    public String sortDir;
+  }
+  
+  public static class FieldFacetParamParser {
+    public static String regexParamList = LIMIT + "|" + OFFSET + "|" + SHOW_MISSING + "|" + OLAP_SORT_EXPRESSION + "|" + SORT_DIRECTION;
+
+    private static Predicate<String> isLimit = Pattern.compile("^" + LIMIT + "$", Pattern.CASE_INSENSITIVE).asPredicate();
+    private static Predicate<String> isOffset = Pattern.compile("^" + OFFSET + "$", Pattern.CASE_INSENSITIVE).asPredicate();
+    private static Predicate<String> isShowMissing = Pattern.compile("^" + SHOW_MISSING + "$", Pattern.CASE_INSENSITIVE).asPredicate();
+    private static Predicate<String> isSortExpr = Pattern.compile("^" + OLAP_SORT_EXPRESSION + "$", Pattern.CASE_INSENSITIVE).asPredicate();
+    private static Predicate<String> isSortDir = Pattern.compile("^" + SORT_DIRECTION + "$", Pattern.CASE_INSENSITIVE).asPredicate();
+    
+    public static void applyParam(AnalyticsValueFacetRequest facet, String param, String value) {
+      if (isLimit.test(param)) {
+        getSort(facet).limit = Integer.parseInt(value);
+      } else if (isOffset.test(param)) {
+        getSort(facet).offset = Integer.parseInt(value);
+      } else if (isShowMissing.test(param)) {
+        facet.expression = "fillmissing(" + facet.expression + ",\"(MISSING)\")";
+      } else if (isSortExpr.test(param)) {
+        AnalyticsSortRequest sort = getSort(facet);
+        AnalyticsExpressionSortRequest criterion;
+        if (sort.criteria.size() == 0) {
+          criterion = new AnalyticsExpressionSortRequest();
+          sort.criteria.add(criterion);
+        } else {
+          criterion = (AnalyticsExpressionSortRequest) sort.criteria.get(0);
+        }
+        criterion.expression = value;
+      } else if (isSortDir.test(param)) {
+        AnalyticsSortRequest sort = getSort(facet);
+        AnalyticsExpressionSortRequest criterion;
+        if (sort.criteria.size() == 0) {
+          criterion = new AnalyticsExpressionSortRequest();
+          sort.criteria.add(criterion);
+        } else {
+          criterion = (AnalyticsExpressionSortRequest) sort.criteria.get(0);
+        }
+        criterion.direction = value;
+      }
+    }
+    
+    public static AnalyticsSortRequest getSort(AnalyticsValueFacetRequest facet) {
+      if (facet.sort == null) {
+        facet.sort = new AnalyticsSortRequest();
+        facet.sort.criteria = new ArrayList<>();
+      }
+      return facet.sort;
+    }
+  }
+  
+  public static final String RANGE_FACET = "rf|rangefacet";
+  public static final String START = "st|start";
+  public static final String END = "e|end";
+  public static final String GAP = "g|gap";
+  public static final String HARDEND = "he|hardend";
+  public static final String INCLUDE_BOUNDARY = "ib|includebound";
+  public static final String OTHER_RANGE = "or|otherrange";
+  
+  public static class OldRangeFacet {
+    public String field;
+    public String start;
+    public String end;
+    public String gaps;
+    public String hardend;
+    public String[] include;
+    public String[] others;
+  }
+  
+  public static class RangeFacetParamParser {
+    public static String regexParamList = START + "|" + END + "|" + GAP + "|" + HARDEND + "|" + INCLUDE_BOUNDARY + "|" + OTHER_RANGE;
+
+    private static Predicate<String> isStart = Pattern.compile("^" + START + "$", Pattern.CASE_INSENSITIVE).asPredicate();
+    private static Predicate<String> isEnd = Pattern.compile("^" + END + "$", Pattern.CASE_INSENSITIVE).asPredicate();
+    private static Predicate<String> isGap = Pattern.compile("^" + GAP + "$", Pattern.CASE_INSENSITIVE).asPredicate();
+    private static Predicate<String> isHardEnd = Pattern.compile("^" + HARDEND + "$", Pattern.CASE_INSENSITIVE).asPredicate();
+    private static Predicate<String> isTrue = Pattern.compile("^(?:t|true)$", Pattern.CASE_INSENSITIVE).asPredicate();
+    private static Predicate<String> isFalse = Pattern.compile("^(?:f|false)$", Pattern.CASE_INSENSITIVE).asPredicate();
+    private static Predicate<String> isInclude = Pattern.compile("^" + INCLUDE_BOUNDARY + "$", Pattern.CASE_INSENSITIVE).asPredicate();
+    private static Predicate<String> isOther = Pattern.compile("^" + OTHER_RANGE + "$", Pattern.CASE_INSENSITIVE).asPredicate();
+    
+    public static void applyParam(AnalyticsRangeFacetRequest facet, String param, String[] values) {
+      if (isStart.test(param)) {
+        facet.start = values[0];
+      } else if (isEnd.test(param)) {
+        facet.end = values[0];
+      } else if (isGap.test(param)) {
+        facet.gaps = Arrays.asList(values[0].split(","));
+      } else if (isHardEnd.test(param)) {
+        if (isTrue.test(values[0])) {
+          facet.hardend = true;
+        } else if (isFalse.test(values[0])) {
+          facet.hardend = false;
+        }
+      } else if (isInclude.test(param)) {
+        facet.include = Arrays.asList(values);
+      } else if (isOther.test(param)) {
+        facet.others = Arrays.asList(values);
+      }
+    }
+  }
+  
+  public static class OldQueryFacet {
+    public String name;
+    public String[] queries;
+  }
+  
+  public static final String QUERY_FACET = "qf|queryfacet";
+  public static final String QUERY = "q|query";
+  
+  //Defaults
+  public static final boolean DEFAULT_ABBREVIATE_PREFIX = true;
+}
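For illustration, a minimal sketch (not part of the patch) of how the two param parsers above could be driven once an old-style key has been matched. The facet classes and their fields (expression, sort, field, start, end, gaps) are the ones referenced in the code above; the enclosing OldAnalyticsParams type, the import paths, and the concrete field names and values ("category", "price", "descending") are assumptions.

import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsRangeFacetRequest;
import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsValueFacetRequest;
import org.apache.solr.analytics.util.OldAnalyticsParams;

public class OldParamParsingSketch {
  public static void main(String[] args) {
    // Field facet: sort the facet by an expression, descending (values are hypothetical).
    AnalyticsValueFacetRequest fieldFacet = new AnalyticsValueFacetRequest();
    fieldFacet.expression = "category";   // hypothetical field name
    OldAnalyticsParams.FieldFacetParamParser.applyParam(fieldFacet, "sortstatistic", "mean(price)");
    OldAnalyticsParams.FieldFacetParamParser.applyParam(fieldFacet, "sortdirection", "descending");

    // Range facet: start/end/gap parameters arrive as String arrays, gaps as a comma-separated list.
    AnalyticsRangeFacetRequest rangeFacet = new AnalyticsRangeFacetRequest();
    rangeFacet.field = "price";           // hypothetical field name
    OldAnalyticsParams.RangeFacetParamParser.applyParam(rangeFacet, "start", new String[] {"0"});
    OldAnalyticsParams.RangeFacetParamParser.applyParam(rangeFacet, "end", new String[] {"100"});
    OldAnalyticsParams.RangeFacetParamParser.applyParam(rangeFacet, "gap", new String[] {"10,20"});
  }
}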

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OldAnalyticsRequestConverter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OldAnalyticsRequestConverter.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OldAnalyticsRequestConverter.java
new file mode 100644
index 0000000..0124dc8
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OldAnalyticsRequestConverter.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.util;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsGroupingRequest;
+import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsQueryFacetRequest;
+import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsRangeFacetRequest;
+import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsRequest;
+import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsValueFacetRequest;
+import org.apache.solr.common.params.SolrParams;
+
+/**
+ * Converts Analytics Requests in the old olap-style format to the new format.
+ */
+public class OldAnalyticsRequestConverter implements OldAnalyticsParams {
+  // Old language Parsing
+  private static final Pattern oldExprPattern = 
+      Pattern.compile("^(?:"+OLD_PREFIX+")\\.([^\\.]+)\\.(?:"+OLD_EXPRESSION+")\\.([^\\.]+)$", Pattern.CASE_INSENSITIVE);
+  private static final Pattern oldFieldFacetPattern = 
+      Pattern.compile("^(?:"+OLD_PREFIX+")\\.([^\\.]+)\\.(?:"+FIELD_FACET+")$", Pattern.CASE_INSENSITIVE);
+  private static final Pattern oldFieldFacetParamPattern = 
+      Pattern.compile("^(?:"+OLD_PREFIX+")\\.([^\\.]+)\\.(?:"+FIELD_FACET+")\\.([^\\.]+)\\.("+FieldFacetParamParser.regexParamList+")$", Pattern.CASE_INSENSITIVE);
+  private static final Pattern oldRangeFacetParamPattern =
+      Pattern.compile("^(?:"+OLD_PREFIX+")\\.([^\\.]+)\\.(?:"+RANGE_FACET+")\\.([^\\.]+)\\.("+RangeFacetParamParser.regexParamList+")$", Pattern.CASE_INSENSITIVE);
+  private static final Pattern oldQueryFacetParamPattern = 
+      Pattern.compile("^(?:"+OLD_PREFIX+")\\.([^\\.]+)\\.(?:"+QUERY_FACET+")\\.([^\\.]+)\\.("+QUERY+")$", Pattern.CASE_INSENSITIVE);
+  
+  /**
+   * Convert the old olap-style analytics request contained in the given params to
+   * an {@link AnalyticsRequest} in the current format.
+   * 
+   * @param params the params to find the old-style analytics request in
+   * @return the converted analytics request
+   */
+  public static AnalyticsRequest convert(SolrParams params) {
+    AnalyticsRequest request = new AnalyticsRequest();
+    request.expressions = new HashMap<>();
+    request.groupings = new HashMap<>();
+    Iterator<String> paramsIterator = params.getParameterNamesIterator();
+    while (paramsIterator.hasNext()) {
+      String param = paramsIterator.next();
+      CharSequence paramSequence = param.subSequence(0, param.length());
+      parseParam(request, param, paramSequence, params);
+    }
+    return request;
+  }
+  
+  private static void parseParam(AnalyticsRequest request, String param, CharSequence paramSequence, SolrParams params) {
+    // Check if grouped expression
+    Matcher m = oldExprPattern.matcher(paramSequence);
+    if (m.matches()) {
+      addExpression(request,m.group(1),m.group(2),params.get(param));
+      return;
+    }
+    
+    // Check if field facet
+    m = oldFieldFacetPattern.matcher(paramSequence);
+    if (m.matches()) {
+      addFieldFacets(request,m.group(1),params.getParams(param));
+      return;
+    }
+    
+    // Check if field facet parameter
+    m = oldFieldFacetParamPattern.matcher(paramSequence);
+    if (m.matches()) {
+      setFieldFacetParam(request,m.group(1),m.group(2),m.group(3),params.getParams(param));
+      return;
+    }
+
+    // Check if range facet parameter
+    m = oldRangeFacetParamPattern.matcher(paramSequence);
+    if (m.matches()) {
+      setRangeFacetParam(request,m.group(1),m.group(2),m.group(3),params.getParams(param));
+      return;
+    }
+
+    // Check if query facet parameter
+    m = oldQueryFacetParamPattern.matcher(paramSequence);
+    if (m.matches()) {
+      setQueryFacetParam(request,m.group(1),m.group(2),m.group(3),params.getParams(param));
+      return;
+    } 
+  }
+  
+  private static AnalyticsGroupingRequest getGrouping(AnalyticsRequest request, String name) {
+    AnalyticsGroupingRequest grouping = request.groupings.get(name);
+    if (grouping == null) {
+      grouping = new AnalyticsGroupingRequest();
+      grouping.expressions = new HashMap<>();
+      grouping.facets = new HashMap<>();
+      request.groupings.put(name, grouping);
+    }
+    return grouping;
+  }
+
+  private static void addFieldFacets(AnalyticsRequest request, String groupingName, String[] params) {
+    AnalyticsGroupingRequest grouping = getGrouping(request, groupingName);
+    
+    for (String param : params) {
+      if (!grouping.facets.containsKey(param)) {
+        AnalyticsValueFacetRequest fieldFacet = new AnalyticsValueFacetRequest();
+        fieldFacet.expression = param;
+        grouping.facets.put(param, fieldFacet);
+      }
+    }
+  }
+
+  private static void setFieldFacetParam(AnalyticsRequest request, String groupingName, String field, String paramType, String[] params) {
+    AnalyticsGroupingRequest grouping = getGrouping(request, groupingName);
+    
+    AnalyticsValueFacetRequest fieldFacet = (AnalyticsValueFacetRequest) grouping.facets.get(field);
+    
+    if (fieldFacet == null) {
+      fieldFacet = new AnalyticsValueFacetRequest();
+      fieldFacet.expression = field;
+      grouping.facets.put(field, fieldFacet);
+    }
+    FieldFacetParamParser.applyParam(fieldFacet, paramType, params[0]);
+  }
+
+  private static void setRangeFacetParam(AnalyticsRequest request, String groupingName, String field, String paramType, String[] params) {
+    AnalyticsGroupingRequest grouping = getGrouping(request, groupingName);
+    
+    AnalyticsRangeFacetRequest rangeFacet = (AnalyticsRangeFacetRequest) grouping.facets.get(field);
+    if (rangeFacet == null) {
+      rangeFacet = new AnalyticsRangeFacetRequest();
+      rangeFacet.field = field;
+      grouping.facets.put(field, rangeFacet);
+    }
+    RangeFacetParamParser.applyParam(rangeFacet, paramType, params);
+  }
+
+  private static void setQueryFacetParam(AnalyticsRequest request, String groupingName, String facetName, String paramType, String[] params) {
+    AnalyticsGroupingRequest grouping = getGrouping(request, groupingName);
+    
+    AnalyticsQueryFacetRequest queryFacet = new AnalyticsQueryFacetRequest();
+    queryFacet.queries = new HashMap<>();
+    if (paramType.equals("query")||paramType.equals("q")) {
+      for (String param : params) {
+        queryFacet.queries.put(param, param);
+      }
+    }
+    grouping.facets.put(facetName, queryFacet);
+  }
+
+  private static void addExpression(AnalyticsRequest request, String groupingName, String expressionName, String expression) {
+    request.expressions.put(groupingName + expressionName, expression);
+    
+    getGrouping(request, groupingName).expressions.put(expressionName, expression);
+  }
+}
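A minimal usage sketch for the converter above, not part of the patch itself. The parameter keys follow the shape matched by the patterns in the converter, <prefix>.<grouping>.<keyword>[.<name>], but the concrete prefix and keyword strings below ("olap", "s", "ff") are hypothetical: the accepted aliases (OLD_PREFIX, OLD_EXPRESSION, FIELD_FACET, ...) are defined in OldAnalyticsParams and are not shown in this hunk.

import org.apache.solr.analytics.AnalyticsRequestParser.AnalyticsRequest;
import org.apache.solr.analytics.util.OldAnalyticsRequestConverter;
import org.apache.solr.common.params.ModifiableSolrParams;

public class ConvertOldRequestSketch {
  public static void main(String[] args) {
    ModifiableSolrParams params = new ModifiableSolrParams();
    // Hypothetical old-style keys and values.
    params.add("olap.sales.s.mean_price", "mean(price)");       // grouped expression
    params.add("olap.sales.ff", "category");                     // field facet on the "sales" grouping
    params.add("olap.sales.ff.category.sortdirection", "desc");  // field facet parameter

    AnalyticsRequest request = OldAnalyticsRequestConverter.convert(params);
    // request.expressions and request.groupings now hold the converted request.
  }
}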

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OrdinalCalculator.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OrdinalCalculator.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OrdinalCalculator.java
new file mode 100644
index 0000000..e484e7c
--- /dev/null
+++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/OrdinalCalculator.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.analytics.util;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Places the values at the requested ordinals (ranks) of a list of comparables into their correct sorted positions.
+ * <p>
+ * Implements the QuickSelect algorithm, modified to select multiple ordinals at once.
+ */
+public class OrdinalCalculator {
+  /**
+   * Calculates a set of ordinal values for a given list of comparable objects.
+   * Once the method returns, each requested ordinal position is guaranteed to hold the correct value in the list.
+   *
+   * @param list the list of {@link Comparable} objects
+   * @param ordinals the collection of ordinals to calculate (0 to (size of list) - 1)
+   */
+  public static <T extends Comparable<T>> void putOrdinalsInPosition(List<T> list, Collection<Integer> ordinals) {
+    int size = list.size();
+    if (size == 0) {
+      return;
+    }
+
+    int[] ords = new int[ordinals.size()];
+    int i = 0;
+    for (int ord : ordinals) {
+      ords[i++] = ord;
+    }
+    Arrays.sort(ords);
+
+    if (ords[0] < 0 || ords[ords.length - 1] > size - 1) {
+      throw new IllegalArgumentException("Ordinals must be in the range [0, list size - 1].");
+    }
+    distributeAndFind(list, ords, 0, ords.length - 1);
+  }
+
+  private static <T extends Comparable<T>> void distributeAndFind(List<T> list, int[] ordinals, int beginIdx, int endIdx) {
+    if (endIdx < beginIdx) {
+      return;
+    }
+    int middleIdxb = beginIdx;
+    int middleIdxe = beginIdx;
+    int begin = (beginIdx == 0) ? -1 : ordinals[beginIdx - 1];
+    int end = (endIdx == ordinals.length - 1) ? list.size() : ordinals[endIdx + 1];
+    double middle = (begin + end) / 2.0;
+    for (int i = beginIdx; i <= endIdx; i++) {
+      double value = Math.abs(ordinals[i] - middle) - Math.abs(ordinals[middleIdxb] - middle);
+      if (ordinals[i] == ordinals[middleIdxb]) {
+        middleIdxe = i;
+      } else if (value < 0) {
+        middleIdxb = i;
+        do {
+          middleIdxe = i;
+          i++;
+        } while (i <= endIdx && ordinals[middleIdxb] == ordinals[i]);
+        break;
+      }
+    }
+
+    int middlePlace = ordinals[middleIdxb];
+    int beginPlace = begin + 1;
+    int endPlace = end - 1;
+
+    select(list, middlePlace, beginPlace, endPlace);
+    distributeAndFind(list, ordinals, beginIdx, middleIdxb - 1);
+    distributeAndFind(list, ordinals, middleIdxe + 1, endIdx);
+  }
+
+  private static <T extends Comparable<T>> void select(List<T> list, int place, int begin, int end) {
+    T split;
+    if (end - begin < 10) {
+      split = list.get((int) (Math.random() * (end - begin + 1)) + begin);
+    } else {
+      split = split(list, begin, end);
+    }
+
+    Point result = partition(list, begin, end, split);
+
+    if (place <= result.low) {
+      select(list, place, begin, result.low);
+    } else if (place >= result.high) {
+      select(list, place, result.high, end);
+    }
+  }
+
+  private static <T extends Comparable<T>> T split(List<T> list, int begin, int end) {
+    T temp;
+    int num = (end - begin + 1);
+    int recursiveSize = (int) Math.sqrt((double) num);
+    int step = num / recursiveSize;
+    for (int i = 1; i < recursiveSize; i++) {
+      int swapFrom = i * step + begin;
+      int swapTo = i + begin;
+      temp = list.get(swapFrom);
+      list.set(swapFrom, list.get(swapTo));
+      list.set(swapTo, temp);
+    }
+    recursiveSize--;
+    select(list, recursiveSize / 2 + begin, begin, recursiveSize + begin);
+    return list.get(recursiveSize / 2 + begin);
+  }
+
+  private static <T extends Comparable<T>> Point partition(List<T> list, int begin, int end, T indexElement) {
+    T temp;
+    int left, right;
+    for (left = begin, right = end; left <= right; left++, right--) {
+      while (list.get(left).compareTo(indexElement) < 0) {
+        left++;
+      }
+      while (right != begin - 1 && list.get(right).compareTo(indexElement) >= 0) {
+        right--;
+      }
+      if (right <= left) {
+        left--;
+        right++;
+        break;
+      }
+      temp = list.get(left);
+      list.set(left, list.get(right));
+      list.set(right, temp);
+    }
+    while (left > begin - 1 && list.get(left).compareTo(indexElement) >= 0) {
+      left--;
+    }
+    while (right < end + 1 && list.get(right).compareTo(indexElement) <= 0) {
+      right++;
+    }
+    int rightMove = right + 1;
+    while (rightMove < end + 1) {
+      if (list.get(rightMove).equals(indexElement)) {
+        temp = list.get(rightMove);
+        list.set(rightMove, list.get(right));
+        list.set(right, temp);
+        do {
+          right++;
+        } while (list.get(right).equals(indexElement));
+        if (rightMove <= right) {
+          rightMove = right;
+        }
+      }
+      rightMove++;
+    }
+    return new Point(left, right);
+  }
+}
+
+class Point {
+  public int low;
+  public int high;
+
+  public Point(int low, int high) {
+    this.low = low;
+    this.high = high;
+  }
+}
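As a quick illustration of the selection utility above, a sketch (not part of the patch) that pulls the minimum, median, and maximum of a small list into place without fully sorting it; the sample values are arbitrary.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.solr.analytics.util.OrdinalCalculator;

public class OrdinalCalculatorSketch {
  public static void main(String[] args) {
    List<Double> values = new ArrayList<>(Arrays.asList(3.2, 1.5, 9.0, 4.4, 2.7));
    int median = values.size() / 2;   // ordinal 2 for a 5-element list

    // After this call the requested ordinals hold the values they would have in a sorted
    // list; the rest of the list is only partially ordered.
    OrdinalCalculator.putOrdinalsInPosition(values, Arrays.asList(0, median, values.size() - 1));

    System.out.println("min    = " + values.get(0));                 // 1.5
    System.out.println("median = " + values.get(median));            // 3.2
    System.out.println("max    = " + values.get(values.size() - 1)); // 9.0
  }
}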

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d5963beb/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/PercentileCalculator.java
----------------------------------------------------------------------
diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/PercentileCalculator.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/PercentileCalculator.java
deleted file mode 100644
index 4ae5cc0..0000000
--- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/util/PercentileCalculator.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.analytics.util;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-public class PercentileCalculator {
-  /**
-   * Calculates a list of percentile values for a given list of objects and percentiles.
-   *
-   * @param list     The list of {@link Comparable} objects to calculate the percentiles of.
-   * @param percents The array of percentiles (.01 to .99) to calculate.
-   * @return a list of comparables
-   */
-  public static <T extends Comparable<T>> List<T> getPercentiles(List<T> list, double[] percents) {
-    int size = list.size();
-    if (size == 0) {
-      return null;
-    }
-
-    int[] percs = new int[percents.length];
-    for (int i = 0; i < percs.length; i++) {
-      percs[i] = (int) Math.round(percents[i] * size - .5);
-    }
-    int[] percentiles = Arrays.copyOf(percs, percs.length);
-    Arrays.sort(percentiles);
-
-    if (percentiles[0] < 0 || percentiles[percentiles.length - 1] > size - 1) {
-      throw new IllegalArgumentException();
-    }
-
-    List<T> results = new ArrayList<>(percs.length);
-
-    distributeAndFind(list, percentiles, 0, percentiles.length - 1);
-
-    for (int i = 0; i < percs.length; i++) {
-      results.add(list.get(percs[i]));
-    }
-    return results;
-  }
-
-  private static <T extends Comparable<T>> void distributeAndFind(List<T> list, int[] percentiles, int beginIdx, int endIdx) {
-    if (endIdx < beginIdx) {
-      return;
-    }
-    int middleIdxb = beginIdx;
-    int middleIdxe = beginIdx;
-    int begin = (beginIdx == 0) ? -1 : percentiles[beginIdx - 1];
-    int end = (endIdx == percentiles.length - 1) ? list.size() : percentiles[endIdx + 1];
-    double middle = (begin + end) / 2.0;
-    for (int i = beginIdx; i <= endIdx; i++) {
-      double value = Math.abs(percentiles[i] - middle) - Math.abs(percentiles[middleIdxb] - middle);
-      if (percentiles[i] == percentiles[middleIdxb]) {
-        middleIdxe = i;
-      } else if (value < 0) {
-        middleIdxb = i;
-        do {
-          middleIdxe = i;
-          i++;
-        } while (i <= endIdx && percentiles[middleIdxb] == percentiles[i]);
-        break;
-      }
-    }
-
-    int middlePlace = percentiles[middleIdxb];
-    int beginPlace = begin + 1;
-    int endPlace = end - 1;
-
-    select(list, middlePlace, beginPlace, endPlace);
-    distributeAndFind(list, percentiles, beginIdx, middleIdxb - 1);
-    distributeAndFind(list, percentiles, middleIdxe + 1, endIdx);
-  }
-
-  private static <T extends Comparable<T>> void select(List<T> list, int place, int begin, int end) {
-    T split;
-    if (end - begin < 10) {
-      split = list.get((int) (Math.random() * (end - begin + 1)) + begin);
-    } else {
-      split = split(list, begin, end);
-    }
-
-    Point result = partition(list, begin, end, split);
-
-    if (place <= result.low) {
-      select(list, place, begin, result.low);
-    } else if (place >= result.high) {
-      select(list, place, result.high, end);
-    }
-  }
-
-  private static <T extends Comparable<T>> T split(List<T> list, int begin, int end) {
-    T temp;
-    int num = (end - begin + 1);
-    int recursiveSize = (int) Math.sqrt((double) num);
-    int step = num / recursiveSize;
-    for (int i = 1; i < recursiveSize; i++) {
-      int swapFrom = i * step + begin;
-      int swapTo = i + begin;
-      temp = list.get(swapFrom);
-      list.set(swapFrom, list.get(swapTo));
-      list.set(swapTo, temp);
-    }
-    recursiveSize--;
-    select(list, recursiveSize / 2 + begin, begin, recursiveSize + begin);
-    return list.get(recursiveSize / 2 + begin);
-  }
-
-  private static <T extends Comparable<T>> Point partition(List<T> list, int begin, int end, T indexElement) {
-    T temp;
-    int left, right;
-    for (left = begin, right = end; left <= right; left++, right--) {
-      while (list.get(left).compareTo(indexElement) < 0) {
-        left++;
-      }
-      while (right != begin - 1 && list.get(right).compareTo(indexElement) >= 0) {
-        right--;
-      }
-      if (right <= left) {
-        left--;
-        right++;
-        break;
-      }
-      temp = list.get(left);
-      list.set(left, list.get(right));
-      list.set(right, temp);
-    }
-    while (left > begin - 1 && list.get(left).compareTo(indexElement) >= 0) {
-      left--;
-    }
-    while (right < end + 1 && list.get(right).compareTo(indexElement) <= 0) {
-      right++;
-    }
-    int rightMove = right + 1;
-    while (rightMove < end + 1) {
-      if (list.get(rightMove).equals(indexElement)) {
-        temp = list.get(rightMove);
-        list.set(rightMove, list.get(right));
-        list.set(right, temp);
-        do {
-          right++;
-        } while (list.get(right).equals(indexElement));
-        if (rightMove <= right) {
-          rightMove = right;
-        }
-      }
-      rightMove++;
-    }
-    return new Point(left, right);
-  }
-}
-
-class Point {
-  public int low;
-  public int high;
-
-  public Point(int low, int high) {
-    this.low = low;
-    this.high = high;
-  }
-}