Posted to commits@lucene.apache.org by nk...@apache.org on 2016/02/05 18:02:58 UTC

[54/87] [abbrv] lucene-solr git commit: LUCENE-6997: refactors lucene-spatial module to a new lucene-spatial-extras module, and refactors sandbox GeoPointField and queries to lucene-spatial module
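
For code that extended the sandbox test base class, the move implies a package change along
these lines (an illustrative sketch inferred from the old and new file paths in this commit,
not part of the committed diff itself):

    // before: the base test class lived in the sandbox module
    import org.apache.lucene.util.BaseGeoPointTestCase;

    // after: it lives in the lucene-spatial module
    import org.apache.lucene.spatial.util.BaseGeoPointTestCase;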

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/50a2f754/lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java b/lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java
new file mode 100644
index 0000000..6f2ba8e
--- /dev/null
+++ b/lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java
@@ -0,0 +1,774 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.spatial.util;
+
+import java.io.IOException;
+import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.MultiDocValues;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.SimpleCollector;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.util.FixedBitSet;
+import org.apache.lucene.util.IOUtils;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.SloppyMath;
+import org.apache.lucene.util.TestUtil;
+import org.junit.BeforeClass;
+
+// TODO: cutover TestGeoUtils too?
+
+public abstract class BaseGeoPointTestCase extends LuceneTestCase {
+
+  protected static final String FIELD_NAME = "point";
+
+  private static final double LON_SCALE = (0x1L<< GeoUtils.BITS)/360.0D;
+  private static final double LAT_SCALE = (0x1L<<GeoUtils.BITS)/180.0D;
+
+  private static double originLat;
+  private static double originLon;
+  private static double lonRange;
+  private static double latRange;
+
+  @BeforeClass
+  public static void beforeClassBase() throws Exception {
+    // Between 1.0 and 3.0:
+    lonRange = 2 * (random().nextDouble() + 0.5);
+    latRange = 2 * (random().nextDouble() + 0.5);
+
+    originLon = GeoUtils.normalizeLon(GeoUtils.MIN_LON_INCL + lonRange + (GeoUtils.MAX_LON_INCL - GeoUtils.MIN_LON_INCL - 2 * lonRange) * random().nextDouble());
+    originLat = GeoUtils.normalizeLat(GeoUtils.MIN_LAT_INCL + latRange + (GeoUtils.MAX_LAT_INCL - GeoUtils.MIN_LAT_INCL - 2 * latRange) * random().nextDouble());
+  }
+
+  /** Returns true if testing on a non-small region may be too slow (as it is for GeoPoint*Query). */
+  protected boolean forceSmall() {
+    return false;
+  }
+
+  // A particularly tricky adversary for BKD tree:
+  public void testSamePointManyTimes() throws Exception {
+
+    // For GeoPointQuery, only run this test nightly:
+    assumeTrue("GeoPoint*Query is too slow otherwise", TEST_NIGHTLY || forceSmall() == false);
+
+    int numPoints = atLeast(1000);
+    boolean small = random().nextBoolean();
+
+    // Every doc has 2 points:
+    double theLat = randomLat(small);
+    double theLon = randomLon(small);
+
+    double[] lats = new double[numPoints];
+    Arrays.fill(lats, theLat);
+
+    double[] lons = new double[numPoints];
+    Arrays.fill(lons, theLon);
+
+    verify(small, lats, lons);
+  }
+
+  public void testAllLatEqual() throws Exception {
+
+    // For GeoPointQuery, only run this test nightly:
+    assumeTrue("GeoPoint*Query is too slow otherwise", TEST_NIGHTLY || forceSmall() == false);
+
+    int numPoints = atLeast(10000);
+    boolean small = forceSmall() || random().nextBoolean();
+    double lat = randomLat(small);
+    double[] lats = new double[numPoints];
+    double[] lons = new double[numPoints];
+
+    boolean haveRealDoc = false;
+
+    for(int docID=0;docID<numPoints;docID++) {
+      int x = random().nextInt(20);
+      if (x == 17) {
+        // Some docs don't have a point:
+        lats[docID] = Double.NaN;
+        if (VERBOSE) {
+          System.out.println("  doc=" + docID + " is missing");
+        }
+        continue;
+      }
+
+      if (docID > 0 && x == 14 && haveRealDoc) {
+        int oldDocID;
+        while (true) {
+          oldDocID = random().nextInt(docID);
+          if (Double.isNaN(lats[oldDocID]) == false) {
+            break;
+          }
+        }
+
+        // Fully identical point:
+        lons[docID] = lons[oldDocID];
+        if (VERBOSE) {
+          System.out.println("  doc=" + docID + " lat=" + lat + " lon=" + lons[docID] + " (same lat/lon as doc=" + oldDocID + ")");
+        }
+      } else {
+        lons[docID] = randomLon(small);
+        haveRealDoc = true;
+        if (VERBOSE) {
+          System.out.println("  doc=" + docID + " lat=" + lat + " lon=" + lons[docID]);
+        }
+      }
+      lats[docID] = lat;
+    }
+
+    verify(small, lats, lons);
+  }
+
+  public void testAllLonEqual() throws Exception {
+
+    // For GeoPointQuery, only run this test nightly:
+    assumeTrue("GeoPoint*Query is too slow otherwise", TEST_NIGHTLY || forceSmall() == false);
+
+    int numPoints = atLeast(10000);
+    boolean small = forceSmall() || random().nextBoolean();
+    double theLon = randomLon(small);
+    double[] lats = new double[numPoints];
+    double[] lons = new double[numPoints];
+
+    boolean haveRealDoc = false;
+
+    //System.out.println("theLon=" + theLon);
+
+    for(int docID=0;docID<numPoints;docID++) {
+      int x = random().nextInt(20);
+      if (x == 17) {
+        // Some docs don't have a point:
+        lats[docID] = Double.NaN;
+        if (VERBOSE) {
+          System.out.println("  doc=" + docID + " is missing");
+        }
+        continue;
+      }
+
+      if (docID > 0 && x == 14 && haveRealDoc) {
+        int oldDocID;
+        while (true) {
+          oldDocID = random().nextInt(docID);
+          if (Double.isNaN(lats[oldDocID]) == false) {
+            break;
+          }
+        }
+
+        // Fully identical point:
+        lats[docID] = lats[oldDocID];
+        if (VERBOSE) {
+          System.out.println("  doc=" + docID + " lat=" + lats[docID] + " lon=" + theLon + " (same lat/lon as doc=" + oldDocID + ")");
+        }
+      } else {
+        lats[docID] = randomLat(small);
+        haveRealDoc = true;
+        if (VERBOSE) {
+          System.out.println("  doc=" + docID + " lat=" + lats[docID] + " lon=" + theLon);
+        }
+      }
+      lons[docID] = theLon;
+    }
+
+    verify(small, lats, lons);
+  }
+
+  public void testMultiValued() throws Exception {
+
+    // For GeoPointQuery, only run this test nightly:
+    assumeTrue("GeoPoint*Query is too slow otherwise", TEST_NIGHTLY || forceSmall() == false);
+
+    int numPoints = atLeast(10000);
+    // Every doc has 2 points:
+    double[] lats = new double[2*numPoints];
+    double[] lons = new double[2*numPoints];
+    Directory dir = newDirectory();
+    noVirusChecker(dir);
+    IndexWriterConfig iwc = newIndexWriterConfig();
+    initIndexWriterConfig(FIELD_NAME, iwc);
+
+    // We rely on docID order:
+    iwc.setMergePolicy(newLogMergePolicy());
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
+
+    boolean small = random().nextBoolean();
+
+    for (int id=0;id<numPoints;id++) {
+      Document doc = new Document();
+      lats[2*id] = randomLat(small);
+      lons[2*id] = randomLon(small);
+      doc.add(newStringField("id", ""+id, Field.Store.YES));
+      addPointToDoc(FIELD_NAME, doc, lats[2*id], lons[2*id]);
+      lats[2*id+1] = randomLat(small);
+      lons[2*id+1] = randomLon(small);
+      addPointToDoc(FIELD_NAME, doc, lats[2*id+1], lons[2*id+1]);
+
+      if (VERBOSE) {
+        System.out.println("id=" + id);
+        System.out.println("  lat=" + lats[2*id] + " lon=" + lons[2*id]);
+        System.out.println("  lat=" + lats[2*id+1] + " lon=" + lons[2*id+1]);
+      }
+      w.addDocument(doc);
+    }
+
+    // TODO: share w/ verify; just need parallel array of the expected ids
+    if (random().nextBoolean()) {
+      w.forceMerge(1);
+    }
+    IndexReader r = w.getReader();
+    w.close();
+
+    // We can't wrap with "exotic" readers because the BKD query must see the BKDDVFormat:
+    IndexSearcher s = newSearcher(r, false);
+
+    int iters = atLeast(75);
+    for (int iter=0;iter<iters;iter++) {
+      GeoBoundingBox rect = randomRect(small, small == false);
+
+      if (VERBOSE) {
+        System.out.println("\nTEST: iter=" + iter + " rect=" + rect);
+      }
+
+      Query query = newRectQuery(FIELD_NAME, rect);
+
+      final FixedBitSet hits = new FixedBitSet(r.maxDoc());
+      s.search(query, new SimpleCollector() {
+
+          private int docBase;
+
+          @Override
+          public boolean needsScores() {
+            return false;
+          }
+
+          @Override
+          protected void doSetNextReader(LeafReaderContext context) throws IOException {
+            docBase = context.docBase;
+          }
+
+          @Override
+          public void collect(int doc) {
+            hits.set(docBase+doc);
+          }
+        });
+
+      boolean fail = false;
+
+      for(int docID=0;docID<lats.length/2;docID++) {
+        double latDoc1 = lats[2*docID];
+        double lonDoc1 = lons[2*docID];
+        double latDoc2 = lats[2*docID+1];
+        double lonDoc2 = lons[2*docID+1];
+
+        Boolean result1 = rectContainsPoint(rect, latDoc1, lonDoc1);
+        if (result1 == null) {
+          // borderline case: cannot test
+          continue;
+        }
+
+        Boolean result2 = rectContainsPoint(rect, latDoc2, lonDoc2);
+        if (result2 == null) {
+          // borderline case: cannot test
+          continue;
+        }
+
+        boolean expected = result1 == Boolean.TRUE || result2 == Boolean.TRUE;
+
+        if (hits.get(docID) != expected) {
+          String id = s.doc(docID).get("id");
+          if (expected) {
+            System.out.println(Thread.currentThread().getName() + ": id=" + id + " docID=" + docID + " should match but did not");
+          } else {
+            System.out.println(Thread.currentThread().getName() + ": id=" + id + " docID=" + docID + " should not match but did");
+          }
+          System.out.println("  rect=" + rect);
+          System.out.println("  lat=" + latDoc1 + " lon=" + lonDoc1 + "\n  lat=" + latDoc2 + " lon=" + lonDoc2);
+          System.out.println("  result1=" + result1 + " result2=" + result2);
+          fail = true;
+        }
+      }
+
+      if (fail) {
+        fail("some hits were wrong");
+      }
+    }
+    r.close();
+    dir.close();
+  }
+
+  public void testRandomTiny() throws Exception {
+    // Make sure single-leaf-node case is OK:
+    doTestRandom(10);
+  }
+
+  public void testRandomMedium() throws Exception {
+    doTestRandom(10000);
+  }
+
+  @Nightly
+  public void testRandomBig() throws Exception {
+    assumeFalse("Direct codec can OOME on this test", TestUtil.getDocValuesFormat(FIELD_NAME).equals("Direct"));
+    assumeFalse("Memory codec can OOME on this test", TestUtil.getDocValuesFormat(FIELD_NAME).equals("Memory"));
+    doTestRandom(200000);
+  }
+
+  private void doTestRandom(int count) throws Exception {
+
+    int numPoints = atLeast(count);
+
+    if (VERBOSE) {
+      System.out.println("TEST: numPoints=" + numPoints);
+    }
+
+    double[] lats = new double[numPoints];
+    double[] lons = new double[numPoints];
+
+    boolean small = random().nextBoolean();
+
+    boolean haveRealDoc = false;
+
+    for (int id=0;id<numPoints;id++) {
+      int x = random().nextInt(20);
+      if (x == 17) {
+        // Some docs don't have a point:
+        lats[id] = Double.NaN;
+        if (VERBOSE) {
+          System.out.println("  id=" + id + " is missing");
+        }
+        continue;
+      }
+
+      if (id > 0 && x < 3 && haveRealDoc) {
+        int oldID;
+        while (true) {
+          oldID = random().nextInt(id);
+          if (Double.isNaN(lats[oldID]) == false) {
+            break;
+          }
+        }
+
+        if (x == 0) {
+          // Identical lat to old point
+          lats[id] = lats[oldID];
+          lons[id] = randomLon(small);
+          if (VERBOSE) {
+            System.out.println("  id=" + id + " lat=" + lats[id] + " lon=" + lons[id] + " (same lat as doc=" + oldID + ")");
+          }
+        } else if (x == 1) {
+          // Identical lon to old point
+          lats[id] = randomLat(small);
+          lons[id] = lons[oldID];
+          if (VERBOSE) {
+            System.out.println("  id=" + id + " lat=" + lats[id] + " lon=" + lons[id] + " (same lon as doc=" + oldID + ")");
+          }
+        } else {
+          assert x == 2;
+          // Fully identical point:
+          lats[id] = lats[oldID];
+          lons[id] = lons[oldID];
+          if (VERBOSE) {
+            System.out.println("  id=" + id + " lat=" + lats[id] + " lon=" + lons[id] + " (same lat/lon as doc=" + oldID + ")");
+          }
+        }
+      } else {
+        lats[id] = randomLat(small);
+        lons[id] = randomLon(small);
+        haveRealDoc = true;
+        if (VERBOSE) {
+          System.out.println("  id=" + id + " lat=" + lats[id] + " lon=" + lons[id]);
+        }
+      }
+    }
+
+    verify(small, lats, lons);
+  }
+
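+  /** Returns a random latitude; when {@code small} is true the value is drawn from the small
+   *  region around {@code originLat} chosen in {@link #beforeClassBase}, otherwise from the
+   *  full latitude range. */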
+  public double randomLat(boolean small) {
+    double result;
+    if (small) {
+      result = GeoUtils.normalizeLat(originLat + latRange * (random().nextDouble() - 0.5));
+    } else {
+      result = -90 + 180.0 * random().nextDouble();
+    }
+    return result;
+  }
+
+  public double randomLon(boolean small) {
+    double result;
+    if (small) {
+      result = GeoUtils.normalizeLon(originLon + lonRange * (random().nextDouble() - 0.5));
+    } else {
+      result = -180 + 360.0 * random().nextDouble();
+    }
+    return result;
+  }
+
+  protected GeoBoundingBox randomRect(boolean small, boolean canCrossDateLine) {
+    double lat0 = randomLat(small);
+    double lat1 = randomLat(small);
+    double lon0 = randomLon(small);
+    double lon1 = randomLon(small);
+
+    if (lat1 < lat0) {
+      double x = lat0;
+      lat0 = lat1;
+      lat1 = x;
+    }
+
+    if (canCrossDateLine == false && lon1 < lon0) {
+      double x = lon0;
+      lon0 = lon1;
+      lon1 = x;
+    }
+
+    return new GeoBoundingBox(lon0, lon1, lat0, lat1);
+  }
+
+  protected void initIndexWriterConfig(String field, IndexWriterConfig iwc) {
+  }
+
+  protected abstract void addPointToDoc(String field, Document doc, double lat, double lon);
+
+  protected abstract Query newRectQuery(String field, GeoBoundingBox bbox);
+
+  protected abstract Query newDistanceQuery(String field, double centerLat, double centerLon, double radiusMeters);
+
+  protected abstract Query newDistanceRangeQuery(String field, double centerLat, double centerLon, double minRadiusMeters, double radiusMeters);
+
+  protected abstract Query newPolygonQuery(String field, double[] lats, double[] lons);
+
+  /** Returns null if it is a borderline case */
+  protected abstract Boolean rectContainsPoint(GeoBoundingBox rect, double pointLat, double pointLon);
+
+  /** Returns null if it is a borderline case */
+  protected abstract Boolean polyRectContainsPoint(GeoBoundingBox rect, double pointLat, double pointLon);
+
+  /** Returns null if it is a borderline case */
+  protected abstract Boolean circleContainsPoint(double centerLat, double centerLon, double radiusMeters, double pointLat, double pointLon);
+
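+  /** Returns null if it is a borderline case */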
+  protected abstract Boolean distanceRangeContainsPoint(double centerLat, double centerLon, double minRadiusMeters, double radiusMeters, double pointLat, double pointLon);
+
+  private static abstract class VerifyHits {
+
+    public void test(AtomicBoolean failed, boolean small, IndexSearcher s, NumericDocValues docIDToID, Set<Integer> deleted, Query query, double[] lats, double[] lons) throws Exception {
+      int maxDoc = s.getIndexReader().maxDoc();
+      final FixedBitSet hits = new FixedBitSet(maxDoc);
+      s.search(query, new SimpleCollector() {
+
+          private int docBase;
+
+          @Override
+          public boolean needsScores() {
+            return false;
+          }
+
+          @Override
+          protected void doSetNextReader(LeafReaderContext context) throws IOException {
+            docBase = context.docBase;
+          }
+
+          @Override
+          public void collect(int doc) {
+            hits.set(docBase+doc);
+          }
+        });
+
+      boolean fail = false;
+
+      for(int docID=0;docID<maxDoc;docID++) {
+        int id = (int) docIDToID.get(docID);
+        Boolean expected;
+        if (deleted.contains(id)) {
+          expected = false;
+        } else if (Double.isNaN(lats[id])) {
+          expected = false;
+        } else {
+          expected = shouldMatch(lats[id], lons[id]);
+        }
+
+        // null means it's a borderline case which is allowed to be wrong:
+        if (expected != null && hits.get(docID) != expected) {
+          if (expected) {
+            System.out.println(Thread.currentThread().getName() + ": id=" + id + " should match but did not");
+          } else {
+            System.out.println(Thread.currentThread().getName() + ": id=" + id + " should not match but did");
+          }
+          System.out.println("  small=" + small + " query=" + query +
+                             " docID=" + docID + "\n  lat=" + lats[id] + " lon=" + lons[id] +
+                             "\n  deleted?=" + deleted.contains(id));
+          if (Double.isNaN(lats[id]) == false) {
+            describe(docID, lats[id], lons[id]);
+          }
+          fail = true;
+        }
+      }
+
+      if (fail) {
+        failed.set(true);
+        fail("some hits were wrong");
+      }
+    }
+
+    /** Return true if we definitely should match, false if we definitely
+     *  should not match, and null if it's a borderline case which might
+     *  go either way. */
+    protected abstract Boolean shouldMatch(double lat, double lon);
+
+    protected abstract void describe(int docID, double lat, double lon);
+  }
+
+  protected void verify(boolean small, double[] lats, double[] lons) throws Exception {
+    IndexWriterConfig iwc = newIndexWriterConfig();
+    // Else we can get O(N^2) merging:
+    int mbd = iwc.getMaxBufferedDocs();
+    if (mbd != -1 && mbd < lats.length/100) {
+      iwc.setMaxBufferedDocs(lats.length/100);
+    }
+    Directory dir;
+    if (lats.length > 100000) {
+      dir = newFSDirectory(createTempDir(getClass().getSimpleName()));
+    } else {
+      dir = newDirectory();
+    }
+    noVirusChecker(dir);
+
+    Set<Integer> deleted = new HashSet<>();
+    // RandomIndexWriter is too slow here:
+    IndexWriter w = new IndexWriter(dir, iwc);
+    for(int id=0;id<lats.length;id++) {
+      Document doc = new Document();
+      doc.add(newStringField("id", ""+id, Field.Store.NO));
+      doc.add(new NumericDocValuesField("id", id));
+      if (Double.isNaN(lats[id]) == false) {
+        addPointToDoc(FIELD_NAME, doc, lats[id], lons[id]);
+      }
+      w.addDocument(doc);
+      if (id > 0 && random().nextInt(100) == 42) {
+        int idToDelete = random().nextInt(id);
+        w.deleteDocuments(new Term("id", ""+idToDelete));
+        deleted.add(idToDelete);
+        if (VERBOSE) {
+          System.out.println("  delete id=" + idToDelete);
+        }
+      }
+    }
+
+    if (random().nextBoolean()) {
+      w.forceMerge(1);
+    }
+    final IndexReader r = DirectoryReader.open(w);
+    w.close();
+
+    // We can't wrap with "exotic" readers because the BKD query must see the BKDDVFormat:
+    IndexSearcher s = newSearcher(r, false);
+
+    // Make sure queries are thread safe:
+    int numThreads = TestUtil.nextInt(random(), 2, 5);
+
+    List<Thread> threads = new ArrayList<>();
+    final int iters = atLeast(75);
+
+    final CountDownLatch startingGun = new CountDownLatch(1);
+    final AtomicBoolean failed = new AtomicBoolean();
+
+    for(int i=0;i<numThreads;i++) {
+      Thread thread = new Thread() {
+          @Override
+          public void run() {
+            try {
+              _run();
+            } catch (Exception e) {
+              failed.set(true);
+              throw new RuntimeException(e);
+            }
+          }
+
+          private void _run() throws Exception {
+            startingGun.await();
+
+            NumericDocValues docIDToID = MultiDocValues.getNumericValues(r, "id");
+
+            for (int iter=0;iter<iters && failed.get() == false;iter++) {
+
+              if (VERBOSE) {
+                System.out.println("\nTEST: iter=" + iter + " s=" + s);
+              }
+              Query query;
+              VerifyHits verifyHits;
+
+              if (random().nextBoolean()) {
+                // Rect: don't allow dateline crossing when testing small:
+                final GeoBoundingBox rect = randomRect(small, small == false);
+
+                query = newRectQuery(FIELD_NAME, rect);
+
+                verifyHits = new VerifyHits() {
+                    @Override
+                    protected Boolean shouldMatch(double pointLat, double pointLon) {
+                      return rectContainsPoint(rect, pointLat, pointLon);
+                    }
+                    @Override
+                    protected void describe(int docID, double lat, double lon) {
+                    }
+                  };
+
+              } else if (random().nextBoolean()) {
+                // Distance
+                final boolean rangeQuery = random().nextBoolean();
+                final double centerLat = randomLat(small);
+                final double centerLon = randomLon(small);
+
+                double radiusMeters;
+                double minRadiusMeters;
+
+                if (small) {
+                  // Approx 3 degrees lon at the equator:
+                  radiusMeters = random().nextDouble() * 333000 + 1.0;
+                } else {
+                  // So the query can cover at most 50% of the earth's surface:
+                  radiusMeters = random().nextDouble() * GeoProjectionUtils.SEMIMAJOR_AXIS * Math.PI / 2.0 + 1.0;
+                }
+
+                // generate a random minimum radius between 1% and 95% the max radius
+                minRadiusMeters = (0.01 + 0.94 * random().nextDouble()) * radiusMeters;
+
+                if (VERBOSE) {
+                  final DecimalFormat df = new DecimalFormat("#,###.00", DecimalFormatSymbols.getInstance(Locale.ENGLISH));
+                  System.out.println("  radiusMeters = " + df.format(radiusMeters)
+                      + ((rangeQuery == true) ? " minRadiusMeters = " + df.format(minRadiusMeters) : ""));
+                }
+
+                try {
+                  if (rangeQuery == true) {
+                    query = newDistanceRangeQuery(FIELD_NAME, centerLat, centerLon, minRadiusMeters, radiusMeters);
+                  } else {
+                    query = newDistanceQuery(FIELD_NAME, centerLat, centerLon, radiusMeters);
+                  }
+                } catch (IllegalArgumentException e) {
+                  if (e.getMessage().contains("exceeds maxRadius")) {
+                    continue;
+                  }
+                  throw e;
+                }
+
+                verifyHits = new VerifyHits() {
+                    @Override
+                    protected Boolean shouldMatch(double pointLat, double pointLon) {
+                      if (rangeQuery == false) {
+                        return circleContainsPoint(centerLat, centerLon, radiusMeters, pointLat, pointLon);
+                      } else {
+                        return distanceRangeContainsPoint(centerLat, centerLon, minRadiusMeters, radiusMeters, pointLat, pointLon);
+                      }
+                    }
+
+                    @Override
+                    protected void describe(int docID, double pointLat, double pointLon) {
+                      double distanceKM = SloppyMath.haversin(centerLat, centerLon, pointLat, pointLon);
+                      System.out.println("  docID=" + docID + " centerLon=" + centerLon + " centerLat=" + centerLat
+                          + " pointLon=" + pointLon + " pointLat=" + pointLat + " distanceMeters=" + (distanceKM * 1000)
+                          + " vs" + ((rangeQuery == true) ? " minRadiusMeters=" + minRadiusMeters : "") + " radiusMeters=" + radiusMeters);
+                    }
+                   };
+
+              // TODO: get poly query working with dateline crossing too (how?)!
+              } else {
+
+                // TODO: poly query can't handle dateline crossing yet:
+                final GeoBoundingBox bbox = randomRect(small, false);
+
+                // Polygon
+                double[] lats = new double[5];
+                double[] lons = new double[5];
+                lats[0] = bbox.minLat;
+                lons[0] = bbox.minLon;
+                lats[1] = bbox.maxLat;
+                lons[1] = bbox.minLon;
+                lats[2] = bbox.maxLat;
+                lons[2] = bbox.maxLon;
+                lats[3] = bbox.minLat;
+                lons[3] = bbox.maxLon;
+                lats[4] = bbox.minLat;
+                lons[4] = bbox.minLon;
+                query = newPolygonQuery(FIELD_NAME, lats, lons);
+
+                verifyHits = new VerifyHits() {
+                    @Override
+                    protected Boolean shouldMatch(double pointLat, double pointLon) {
+                      return polyRectContainsPoint(bbox, pointLat, pointLon);
+                    }
+
+                    @Override
+                    protected void describe(int docID, double lat, double lon) {
+                    }
+                  };
+              }
+
+              if (query != null) {
+
+                if (VERBOSE) {
+                  System.out.println("  query=" + query);
+                }
+
+                verifyHits.test(failed, small, s, docIDToID, deleted, query, lats, lons);
+              }
+            }
+          }
+      };
+      thread.setName("T" + i);
+      thread.start();
+      threads.add(thread);
+    }
+    startingGun.countDown();
+    for(Thread thread : threads) {
+      thread.join();
+    }
+    IOUtils.close(r, dir);
+    assertFalse(failed.get());
+  }
+
+  protected Directory noVirusChecker(Directory dir) {
+    if (dir instanceof MockDirectoryWrapper) {
+      ((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
+    }
+    return dir;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/50a2f754/lucene/spatial/src/test/org/apache/lucene/spatial/util/TestGeoUtils.java
----------------------------------------------------------------------
diff --git a/lucene/spatial/src/test/org/apache/lucene/spatial/util/TestGeoUtils.java b/lucene/spatial/src/test/org/apache/lucene/spatial/util/TestGeoUtils.java
new file mode 100644
index 0000000..96b1268
--- /dev/null
+++ b/lucene/spatial/src/test/org/apache/lucene/spatial/util/TestGeoUtils.java
@@ -0,0 +1,551 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.spatial.util;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.junit.BeforeClass;
+
+import com.carrotsearch.randomizedtesting.generators.RandomInts;
+
+import static org.apache.lucene.spatial.util.GeoDistanceUtils.DISTANCE_PCT_ERR;
+
+/**
+ * Test class for methods in GeoUtils
+ *
+ * @lucene.experimental
+ */
+public class TestGeoUtils extends LuceneTestCase {
+
+  private static final double LON_SCALE = (0x1L<< GeoUtils.BITS)/360.0D;
+  private static final double LAT_SCALE = (0x1L<<GeoUtils.BITS)/180.0D;
+
+  // Global bounding box we will "cover" in the random test; we have to make this "smallish" else the queries take very long:
+  private static double originLat;
+  private static double originLon;
+  //  private static double range;
+  private static double lonRange;
+  private static double latRange;
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    // Between 1.0 and 3.0:
+    lonRange = 2 * (random().nextDouble() + 0.5);
+    latRange = 2 * (random().nextDouble() + 0.5);
+
+    originLon = GeoUtils.MIN_LON_INCL + lonRange + (GeoUtils.MAX_LON_INCL - GeoUtils.MIN_LON_INCL - 2 * lonRange) * random().nextDouble();
+    originLon = GeoUtils.normalizeLon(originLon);
+    originLat = GeoUtils.MIN_LAT_INCL + latRange + (GeoUtils.MAX_LAT_INCL - GeoUtils.MIN_LAT_INCL - 2 * latRange) * random().nextDouble();
+    originLat = GeoUtils.normalizeLat(originLat);
+
+    if (VERBOSE) {
+      System.out.println("TEST: originLon=" + originLon + " lonRange= " + lonRange + " originLat=" + originLat + " latRange=" + latRange);
+    }
+  }
+
+  public void testGeoHash() {
+    int numPoints = atLeast(100);
+    String randomGeoHashString;
+    String mortonGeoHash;
+    long mortonLongFromGHLong, geoHashLong, mortonLongFromGHString;
+    int randomLevel;
+    for (int i = 0; i < numPoints; ++i) {
+      // random point
+      double lat = randomLat(false);
+      double lon = randomLon(false);
+
+      // compute geohash straight from lat/lon and from morton encoded value to ensure they're the same
+      randomGeoHashString = GeoHashUtils.stringEncode(lon, lat, randomLevel = random().nextInt(12 - 1) + 1);
+      mortonGeoHash = GeoHashUtils.stringEncodeFromMortonLong(GeoUtils.mortonHash(lon, lat), randomLevel);
+      assertEquals(randomGeoHashString, mortonGeoHash);
+
+      // v&v conversion from lat/lon or geohashstring to geohash long and back to geohash string
+      geoHashLong = (random().nextBoolean()) ? GeoHashUtils.longEncode(lon, lat, randomLevel) : GeoHashUtils.longEncode(randomGeoHashString);
+      assertEquals(randomGeoHashString, GeoHashUtils.stringEncode(geoHashLong));
+
+      // v&v conversion from geohash long to morton long
+      mortonLongFromGHString = GeoHashUtils.mortonEncode(randomGeoHashString);
+      mortonLongFromGHLong = GeoHashUtils.mortonEncode(geoHashLong);
+      assertEquals(mortonLongFromGHLong, mortonLongFromGHString);
+
+      // v&v lat/lon from geohash string and geohash long
+      assertEquals(GeoUtils.mortonUnhashLat(mortonLongFromGHString), GeoUtils.mortonUnhashLat(mortonLongFromGHLong), 0);
+      assertEquals(GeoUtils.mortonUnhashLon(mortonLongFromGHString), GeoUtils.mortonUnhashLon(mortonLongFromGHLong), 0);
+    }
+  }
+
+  /**
+   * Pass condition: lat=42.6, lng=-5.6 should be encoded as "ezs42e44yx96",
+   * lat=57.64911 lng=10.40744 should be encoded as "u4pruydqqvj8"
+   */
+  public void testEncode() {
+    String hash = GeoHashUtils.stringEncode(-5.6, 42.6, 12);
+    assertEquals("ezs42e44yx96", hash);
+
+    hash = GeoHashUtils.stringEncode(10.40744, 57.64911, 12);
+    assertEquals("u4pruydqqvj8", hash);
+  }
+
+  /**
+   * Pass condition: lat=52.3738007, lng=4.8909347 should be encoded and then
+   * decoded within 0.00001 of the original value
+   */
+  public void testDecodePreciseLongitudeLatitude() {
+    final String geohash = GeoHashUtils.stringEncode(4.8909347, 52.3738007);
+    final long hash = GeoHashUtils.mortonEncode(geohash);
+
+    assertEquals(52.3738007, GeoUtils.mortonUnhashLat(hash), 0.00001D);
+    assertEquals(4.8909347, GeoUtils.mortonUnhashLon(hash), 0.00001D);
+  }
+
+  /**
+   * Pass condition: lat=84.6, lng=10.5 should be encoded and then decoded
+   * within 0.00001 of the original value
+   */
+  public void testDecodeImpreciseLongitudeLatitude() {
+    final String geohash = GeoHashUtils.stringEncode(10.5, 84.6);
+
+    final long hash = GeoHashUtils.mortonEncode(geohash);
+
+    assertEquals(84.6, GeoUtils.mortonUnhashLat(hash), 0.00001D);
+    assertEquals(10.5, GeoUtils.mortonUnhashLon(hash), 0.00001D);
+  }
+
+  public void testDecodeEncode() {
+    final String geoHash = "u173zq37x014";
+    assertEquals(geoHash, GeoHashUtils.stringEncode(4.8909347, 52.3738007));
+    final long mortonHash = GeoHashUtils.mortonEncode(geoHash);
+    final double lon = GeoUtils.mortonUnhashLon(mortonHash);
+    final double lat = GeoUtils.mortonUnhashLat(mortonHash);
+    assertEquals(52.37380061d, GeoUtils.mortonUnhashLat(mortonHash), 0.000001d);
+    assertEquals(4.8909343d, GeoUtils.mortonUnhashLon(mortonHash), 0.000001d);
+
+    assertEquals(geoHash, GeoHashUtils.stringEncode(lon, lat));
+  }
+
+  public void testNeighbors() {
+    String geohash = "gcpv";
+    List<String> expectedNeighbors = new ArrayList<>();
+    expectedNeighbors.add("gcpw");
+    expectedNeighbors.add("gcpy");
+    expectedNeighbors.add("u10n");
+    expectedNeighbors.add("gcpt");
+    expectedNeighbors.add("u10j");
+    expectedNeighbors.add("gcps");
+    expectedNeighbors.add("gcpu");
+    expectedNeighbors.add("u10h");
+    Collection<? super String> neighbors = new ArrayList<>();
+    GeoHashUtils.addNeighbors(geohash, neighbors );
+    assertEquals(expectedNeighbors, neighbors);
+
+    // Border odd geohash
+    geohash = "u09x";
+    expectedNeighbors = new ArrayList<>();
+    expectedNeighbors.add("u0c2");
+    expectedNeighbors.add("u0c8");
+    expectedNeighbors.add("u0cb");
+    expectedNeighbors.add("u09r");
+    expectedNeighbors.add("u09z");
+    expectedNeighbors.add("u09q");
+    expectedNeighbors.add("u09w");
+    expectedNeighbors.add("u09y");
+    neighbors = new ArrayList<>();
+    GeoHashUtils.addNeighbors(geohash, neighbors);
+    assertEquals(expectedNeighbors, neighbors);
+
+    // Border even geohash
+    geohash = "u09tv";
+    expectedNeighbors = new ArrayList<>();
+    expectedNeighbors.add("u09wh");
+    expectedNeighbors.add("u09wj");
+    expectedNeighbors.add("u09wn");
+    expectedNeighbors.add("u09tu");
+    expectedNeighbors.add("u09ty");
+    expectedNeighbors.add("u09ts");
+    expectedNeighbors.add("u09tt");
+    expectedNeighbors.add("u09tw");
+    neighbors = new ArrayList<>();
+    GeoHashUtils.addNeighbors(geohash, neighbors );
+    assertEquals(expectedNeighbors, neighbors);
+
+    // Border even and odd geohash
+    geohash = "ezzzz";
+    expectedNeighbors = new ArrayList<>();
+    expectedNeighbors.add("gbpbn");
+    expectedNeighbors.add("gbpbp");
+    expectedNeighbors.add("u0000");
+    expectedNeighbors.add("ezzzy");
+    expectedNeighbors.add("spbpb");
+    expectedNeighbors.add("ezzzw");
+    expectedNeighbors.add("ezzzx");
+    expectedNeighbors.add("spbp8");
+    neighbors = new ArrayList<>();
+    GeoHashUtils.addNeighbors(geohash, neighbors );
+    assertEquals(expectedNeighbors, neighbors);
+  }
+
+  public void testClosestPointOnBBox() {
+    double[] result = new double[2];
+    GeoDistanceUtils.closestPointOnBBox(20, 30, 40, 50, 70, 70, result);
+    assertEquals(40.0, result[0], 0.0);
+    assertEquals(50.0, result[1], 0.0);
+
+    GeoDistanceUtils.closestPointOnBBox(-20, -20, 0, 0, 70, 70, result);
+    assertEquals(0.0, result[0], 0.0);
+    assertEquals(0.0, result[1], 0.0);
+  }
+
+  private static class Cell {
+    static int nextCellID;
+
+    final Cell parent;
+    final int cellID;
+    final double minLon, maxLon;
+    final double minLat, maxLat;
+    final int splitCount;
+
+    public Cell(Cell parent,
+                double minLon, double minLat,
+                double maxLon, double maxLat,
+                int splitCount) {
+      assert maxLon >= minLon;
+      assert maxLat >= minLat;
+      this.parent = parent;
+      this.minLon = minLon;
+      this.minLat = minLat;
+      this.maxLon = maxLon;
+      this.maxLat = maxLat;
+      this.cellID = nextCellID++;
+      this.splitCount = splitCount;
+    }
+
+    /** Returns true if the quantized point lies within this cell, inclusive on all bounds. */
+    public boolean contains(double lon, double lat) {
+      return lon >= minLon && lon <= maxLon && lat >= minLat && lat <= maxLat;
+    }
+
+    @Override
+    public String toString() {
+      return "cell=" + cellID + (parent == null ? "" : " parentCellID=" + parent.cellID) + " lon: " + minLon + " TO " + maxLon + ", lat: " + minLat + " TO " + maxLat + ", splits: " + splitCount;
+    }
+  }
+
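+  // scaleLon/scaleLat quantize degrees onto the (2^GeoUtils.BITS)-step integer scale; unscaleLon/unscaleLat invert it.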
+  public long scaleLon(final double val) {
+    return (long) ((val-GeoUtils.MIN_LON_INCL) * LON_SCALE);
+  }
+
+  public long scaleLat(final double val) {
+    return (long) ((val-GeoUtils.MIN_LAT_INCL) * LAT_SCALE);
+  }
+
+  public double unscaleLon(final long val) {
+    return (val / LON_SCALE) + GeoUtils.MIN_LON_INCL;
+  }
+
+  public double unscaleLat(final long val) {
+    return (val / LAT_SCALE) + GeoUtils.MIN_LAT_INCL;
+  }
+
+  public double randomLat(boolean small) {
+    double result;
+    if (small) {
+      result = GeoUtils.normalizeLat(originLat + latRange * (random().nextDouble() - 0.5));
+    } else {
+      result = -90 + 180.0 * random().nextDouble();
+    }
+    return result;
+  }
+
+  public double randomLon(boolean small) {
+    double result;
+    if (small) {
+      result = GeoUtils.normalizeLon(originLon + lonRange * (random().nextDouble() - 0.5));
+    } else {
+      result = -180 + 360.0 * random().nextDouble();
+    }
+    return result;
+  }
+
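+  /** Computes the expected hits for a circle query by splitting {@code root} into random
+   *  sub-cells (via an explicit stack); each cell is pruned, accepted wholesale, split further,
+   *  or (at a leaf) brute-force checked with haversin distance, exercising the GeoRelationUtils
+   *  relation methods along the way. */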
+  private void findMatches(Set<Integer> hits, PrintWriter log, Cell root,
+                           double centerLon, double centerLat, double radiusMeters,
+                           double[] docLons, double[] docLats) {
+
+    if (VERBOSE) {
+      log.println("  root cell: " + root);
+    }
+
+    List<Cell> queue = new ArrayList<>();
+    queue.add(root);
+
+    int recurseDepth = RandomInts.randomIntBetween(random(), 5, 15);
+
+    while (queue.size() > 0) {
+      Cell cell = queue.get(queue.size()-1);
+      queue.remove(queue.size()-1);
+      if (VERBOSE) {
+        log.println("  cycle: " + cell + " queue.size()=" + queue.size());
+      }
+
+      if (random().nextInt(10) == 7 || cell.splitCount > recurseDepth) {
+        if (VERBOSE) {
+          log.println("    leaf");
+        }
+        // Leaf cell: brute force check all docs that fall within this cell:
+        for(int docID=0;docID<docLons.length;docID++) {
+          if (cell.contains(docLons[docID], docLats[docID])) {
+            double distanceMeters = GeoDistanceUtils.haversin(centerLat, centerLon, docLats[docID], docLons[docID]);
+            if (distanceMeters <= radiusMeters) {
+              if (VERBOSE) {
+                log.println("    check doc=" + docID + ": match!");
+              }
+              hits.add(docID);
+            } else {
+              if (VERBOSE) {
+                log.println("    check doc=" + docID + ": no match");
+              }
+            }
+          }
+        }
+      } else {
+
+        if (GeoRelationUtils.rectWithinCircle(cell.minLon, cell.minLat, cell.maxLon, cell.maxLat, centerLon, centerLat, radiusMeters)) {
+          // Query circle fully contains this cell, just addAll:
+          if (VERBOSE) {
+            log.println("    circle fully contains cell: now addAll");
+          }
+          for(int docID=0;docID<docLons.length;docID++) {
+            if (cell.contains(docLons[docID], docLats[docID])) {
+              if (VERBOSE) {
+                log.println("    addAll doc=" + docID);
+              }
+              hits.add(docID);
+            }
+          }
+          continue;
+        } else if (GeoRelationUtils.rectWithin(root.minLon, root.minLat, root.maxLon, root.maxLat,
+                                       cell.minLon, cell.minLat, cell.maxLon, cell.maxLat)) {
+          // Fall through below to "recurse"
+          if (VERBOSE) {
+            log.println("    cell fully contains circle: keep splitting");
+          }
+        } else if (GeoRelationUtils.rectCrossesCircle(cell.minLon, cell.minLat, cell.maxLon, cell.maxLat,
+                                              centerLon, centerLat, radiusMeters)) {
+          // Fall through below to "recurse"
+          if (VERBOSE) {
+            log.println("    cell overlaps circle: keep splitting");
+          }
+        } else {
+          if (VERBOSE) {
+            log.println("    no overlap: drop this cell");
+            for(int docID=0;docID<docLons.length;docID++) {
+              if (cell.contains(docLons[docID], docLats[docID])) {
+                if (VERBOSE) {
+                  log.println("    skip doc=" + docID);
+                }
+              }
+            }
+          }
+          continue;
+        }
+
+        // Randomly split:
+        if (random().nextBoolean()) {
+
+          // Split on lon:
+          double splitValue = cell.minLon + (cell.maxLon - cell.minLon) * random().nextDouble();
+          if (VERBOSE) {
+            log.println("    now split on lon=" + splitValue);
+          }
+          Cell cell1 = new Cell(cell,
+                                cell.minLon, cell.minLat,
+                                splitValue, cell.maxLat,
+                                cell.splitCount+1);
+          Cell cell2 = new Cell(cell,
+                                splitValue, cell.minLat,
+                                cell.maxLon, cell.maxLat,
+                                cell.splitCount+1);
+          if (VERBOSE) {
+            log.println("    split cell1: " + cell1);
+            log.println("    split cell2: " + cell2);
+          }
+          queue.add(cell1);
+          queue.add(cell2);
+        } else {
+
+          // Split on lat:
+          double splitValue = cell.minLat + (cell.maxLat - cell.minLat) * random().nextDouble();
+          if (VERBOSE) {
+            log.println("    now split on lat=" + splitValue);
+          }
+          Cell cell1 = new Cell(cell,
+                                cell.minLon, cell.minLat,
+                                cell.maxLon, splitValue,
+                                cell.splitCount+1);
+          Cell cell2 = new Cell(cell,
+                                cell.minLon, splitValue,
+                                cell.maxLon, cell.maxLat,
+                                cell.splitCount+1);
+          if (VERBOSE) {
+            log.println("    split cells:\n      " + cell1 + "\n      " + cell2);
+          }
+          queue.add(cell1);
+          queue.add(cell2);
+        }
+      }
+    }
+  }
+
+  /** Tests consistency of GeoRelationUtils.rectWithinCircle, .rectCrossesCircle, .rectWithin and the GeoDistanceUtils.haversin distance check */
+  public void testGeoRelations() throws Exception {
+
+    int numDocs = atLeast(1000);
+
+    boolean useSmallRanges = random().nextBoolean();
+
+    if (VERBOSE) {
+      System.out.println("TEST: " + numDocs + " docs useSmallRanges=" + useSmallRanges);
+    }
+
+    double[] docLons = new double[numDocs];
+    double[] docLats = new double[numDocs];
+    for(int docID=0;docID<numDocs;docID++) {
+      docLons[docID] = randomLon(useSmallRanges);
+      docLats[docID] = randomLat(useSmallRanges);
+      if (VERBOSE) {
+        System.out.println("  doc=" + docID + ": lon=" + docLons[docID] + " lat=" + docLats[docID]);
+      }
+    }
+
+    int iters = atLeast(50);
+
+    for(int iter=0;iter<iters;iter++) {
+
+      Cell.nextCellID = 0;
+
+      double centerLon = randomLon(useSmallRanges);
+      double centerLat = randomLat(useSmallRanges);
+
+      // So the circle covers at most 50% of the earth's surface:
+
+      double radiusMeters;
+
+      // TODO: large exotic rectangles created by BKD may be inaccurate up to 2 times DISTANCE_PCT_ERR.
+      // restricting size until LUCENE-6994 can be addressed
+      if (true || useSmallRanges) {
+        // Approx 3 degrees lon at the equator:
+        radiusMeters = random().nextDouble() * 333000;
+      } else {
+        radiusMeters = random().nextDouble() * GeoProjectionUtils.SEMIMAJOR_AXIS * Math.PI / 2.0;
+      }
+
+      StringWriter sw = new StringWriter();
+      PrintWriter log = new PrintWriter(sw, true);
+
+      if (VERBOSE) {
+        log.println("\nTEST: iter=" + iter + " radiusMeters=" + radiusMeters + " centerLon=" + centerLon + " centerLat=" + centerLat);
+      }
+
+      GeoBoundingBox bbox = GeoUtils.circleToBBox(centerLon, centerLat, radiusMeters);
+
+      Set<Integer> hits = new HashSet<>();
+
+      if (bbox.maxLon < bbox.minLon) {
+        // Crosses dateline
+        log.println("  circle crosses dateline; first left query");
+        double unwrappedLon = centerLon;
+        if (unwrappedLon > bbox.maxLon) {
+          // unwrap left
+          unwrappedLon += -360.0D;
+        }
+        findMatches(hits, log,
+                    new Cell(null,
+                             -180, bbox.minLat,
+                             bbox.maxLon, bbox.maxLat,
+                             0),
+                    unwrappedLon, centerLat, radiusMeters, docLons, docLats);
+        log.println("  circle crosses dateline; now right query");
+        if (unwrappedLon < bbox.maxLon) {
+          // unwrap right
+          unwrappedLon += 360.0D;
+        }
+        findMatches(hits, log,
+                    new Cell(null,
+                             bbox.minLon, bbox.minLat,
+                             180, bbox.maxLat,
+                             0),
+                    unwrappedLon, centerLat, radiusMeters, docLons, docLats);
+      } else {
+        // Start with the root cell that fully contains the shape:
+        findMatches(hits, log,
+                    new Cell(null,
+                             bbox.minLon, bbox.minLat,
+                             bbox.maxLon, bbox.maxLat,
+                             0),
+                    centerLon, centerLat, radiusMeters,
+                    docLons, docLats);
+      }
+
+      if (VERBOSE) {
+        log.println("  " + hits.size() + " hits");
+      }
+
+      int failCount = 0;
+
+      // Done matching, now verify:
+      for(int docID=0;docID<numDocs;docID++) {
+        double distanceMeters = GeoDistanceUtils.haversin(centerLat, centerLon, docLats[docID], docLons[docID]);
+        final Boolean expected;
+        final double percentError = Math.abs(distanceMeters - radiusMeters) / distanceMeters;
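+        // Points within DISTANCE_PCT_ERR of the radius are borderline: expected stays null and
+        // either outcome is accepted below.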
+        if (percentError <= DISTANCE_PCT_ERR) {
+          expected = null;
+        } else {
+          expected = distanceMeters <= radiusMeters;
+        }
+
+        boolean actual = hits.contains(docID);
+        if (expected != null && actual != expected) {
+          if (actual) {
+            log.println("doc=" + docID + " matched but should not with distance error " + percentError + " on iteration " + iter);
+          } else {
+            log.println("doc=" + docID + " did not match but should with distance error " + percentError + " on iteration " + iter);
+          }
+          log.println("  lon=" + docLons[docID] + " lat=" + docLats[docID] + " distanceMeters=" + distanceMeters + " vs radiusMeters=" + radiusMeters);
+          failCount++;
+        }
+      }
+
+      if (failCount != 0) {
+        System.out.print(sw.toString());
+        fail(failCount + " incorrect hits (see above)");
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/50a2f754/lucene/spatial/src/test/org/apache/lucene/spatial/vector/TestPointVectorStrategy.java
----------------------------------------------------------------------
diff --git a/lucene/spatial/src/test/org/apache/lucene/spatial/vector/TestPointVectorStrategy.java b/lucene/spatial/src/test/org/apache/lucene/spatial/vector/TestPointVectorStrategy.java
deleted file mode 100644
index d62a0a8..0000000
--- a/lucene/spatial/src/test/org/apache/lucene/spatial/vector/TestPointVectorStrategy.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.spatial.vector;
-
-import com.spatial4j.core.context.SpatialContext;
-import com.spatial4j.core.shape.Circle;
-import com.spatial4j.core.shape.Point;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.spatial.SpatialMatchConcern;
-import org.apache.lucene.spatial.StrategyTestCase;
-import org.apache.lucene.spatial.query.SpatialArgs;
-import org.apache.lucene.spatial.query.SpatialOperation;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.IOException;
-
-public class TestPointVectorStrategy extends StrategyTestCase {
-
-  @Before
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
-    this.ctx = SpatialContext.GEO;
-    this.strategy = new PointVectorStrategy(ctx, getClass().getSimpleName());
-  }
-
-  @Test
-  public void testCircleShapeSupport() {
-    Circle circle = ctx.makeCircle(ctx.makePoint(0, 0), 10);
-    SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects, circle);
-    Query query = this.strategy.makeQuery(args);
-
-    assertNotNull(query);
-  }
-
-  @Test(expected = UnsupportedOperationException.class)
-  public void testInvalidQueryShape() {
-    Point point = ctx.makePoint(0, 0);
-    SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects, point);
-    this.strategy.makeQuery(args);
-  }
-
-  @Test
-  public void testCitiesIntersectsBBox() throws IOException {
-    getAddAndVerifyIndexedDocuments(DATA_WORLD_CITIES_POINTS);
-    executeQueries(SpatialMatchConcern.FILTER, QTEST_Cities_Intersects_BBox);
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/50a2f754/solr/common-build.xml
----------------------------------------------------------------------
diff --git a/solr/common-build.xml b/solr/common-build.xml
index eab251b..6a06928 100644
--- a/solr/common-build.xml
+++ b/solr/common-build.xml
@@ -100,7 +100,7 @@
     <pathelement location="${highlighter.jar}"/>
     <pathelement location="${memory.jar}"/>
     <pathelement location="${misc.jar}"/>
-    <pathelement location="${spatial.jar}"/>
+    <pathelement location="${spatial-extras.jar}"/>
     <pathelement location="${expressions.jar}"/>
     <pathelement location="${suggest.jar}"/>
     <pathelement location="${grouping.jar}"/>
@@ -169,7 +169,7 @@
 
   <target name="prep-lucene-jars" 
           depends="jar-lucene-core, jar-backward-codecs, jar-analyzers-phonetic, jar-analyzers-kuromoji, jar-codecs,jar-expressions, jar-suggest, jar-highlighter, jar-memory,
-                   jar-misc, jar-spatial, jar-grouping, jar-queries, jar-queryparser, jar-join, jar-sandbox">
+                   jar-misc, jar-spatial-extras, jar-grouping, jar-queries, jar-queryparser, jar-join, jar-sandbox">
       <property name="solr.deps.compiled" value="true"/>
   </target>
   
@@ -245,7 +245,7 @@
   <property name="lucenedocs" location="${common.dir}/build/docs"/>
 
   <!-- dependency to ensure all lucene javadocs are present -->
-  <target name="lucene-javadocs" depends="javadocs-lucene-core,javadocs-analyzers-common,javadocs-analyzers-icu,javadocs-analyzers-kuromoji,javadocs-analyzers-phonetic,javadocs-analyzers-smartcn,javadocs-analyzers-morfologik,javadocs-analyzers-stempel,javadocs-analyzers-uima,javadocs-backward-codecs,javadocs-codecs,javadocs-expressions,javadocs-suggest,javadocs-grouping,javadocs-queries,javadocs-queryparser,javadocs-highlighter,javadocs-memory,javadocs-misc,javadocs-spatial,javadocs-join,javadocs-test-framework"/>
+  <target name="lucene-javadocs" depends="javadocs-lucene-core,javadocs-analyzers-common,javadocs-analyzers-icu,javadocs-analyzers-kuromoji,javadocs-analyzers-phonetic,javadocs-analyzers-smartcn,javadocs-analyzers-morfologik,javadocs-analyzers-stempel,javadocs-analyzers-uima,javadocs-backward-codecs,javadocs-codecs,javadocs-expressions,javadocs-suggest,javadocs-grouping,javadocs-queries,javadocs-queryparser,javadocs-highlighter,javadocs-memory,javadocs-misc,javadocs-spatial-extras,javadocs-join,javadocs-test-framework"/>
 
   <!-- create javadocs for the current module -->
   <target name="javadocs" depends="compile-core,define-lucene-javadoc-url,lucene-javadocs,javadocs-solr-core,check-javadocs-uptodate" unless="javadocs-uptodate-${name}">
@@ -322,7 +322,7 @@
           <link offline="true" href="${lucene.javadoc.url}highlighter" packagelistloc="${lucenedocs}/highlighter"/>
           <link offline="true" href="${lucene.javadoc.url}memory" packagelistloc="${lucenedocs}/memory"/>
           <link offline="true" href="${lucene.javadoc.url}misc" packagelistloc="${lucenedocs}/misc"/>
-          <link offline="true" href="${lucene.javadoc.url}spatial" packagelistloc="${lucenedocs}/spatial"/>
+          <link offline="true" href="${lucene.javadoc.url}spatial-extras" packagelistloc="${lucenedocs}/spatial-extras"/>
           <links/>
           <link href=""/>
         </sources>