Posted to commits@lucene.apache.org by mi...@apache.org on 2016/02/06 14:27:10 UTC

[17/17] lucene-solr git commit: Merge branch 'lucene-6835'

Merge branch 'lucene-6835'

Retrying file deletion is now the responsibility of Directory.deleteFile, and Directory.listAll now returns all entries in sorted order.
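Illustrative only, not part of this commit: a minimal sketch of what the changed contract looks like to a caller of Directory, assuming an index path passed on the command line; the segment file name in the commented-out delete is hypothetical.

  import java.io.IOException;
  import java.nio.file.Paths;

  import org.apache.lucene.store.Directory;
  import org.apache.lucene.store.FSDirectory;

  public class ListAndDeleteSketch {
    public static void main(String[] args) throws IOException {
      try (Directory dir = FSDirectory.open(Paths.get(args[0]))) {
        // listAll() now returns entries in sorted order on every platform,
        // so callers no longer need to sort to get deterministic ordering.
        for (String name : dir.listAll()) {
          System.out.println(name);
        }

        // Retrying a failed deletion (e.g. when another process briefly holds
        // the file open on Windows) is now deleteFile's responsibility, so
        // callers such as IndexFileDeleter no longer carry their own retry loops.
        // dir.deleteFile("_0.cfs"); // hypothetical file name, for illustration only
      }
    }
  }

The practical effect is that per-caller retry logic and per-caller sorting can be removed, which is what most of the test and IndexFileDeleter changes below do.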


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8e784699
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8e784699
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8e784699

Branch: refs/heads/master
Commit: 8e784699f009b80301306f7fc55225d64b95416b
Parents: 072d44f 5b4c1d9
Author: Mike McCandless <mi...@apache.org>
Authored: Sat Feb 6 08:26:15 2016 -0500
Committer: Mike McCandless <mi...@apache.org>
Committed: Sat Feb 6 08:26:15 2016 -0500

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   5 +
 .../analysis/hunspell/Test64kAffixes.java       |   4 +-
 .../analysis/hunspell/TestAllDictionaries.java  |   6 +-
 .../analysis/hunspell/TestAllDictionaries2.java |   6 -
 .../analysis/hunspell/TestDictionary.java       |   6 +-
 .../hunspell/TestHunspellStemFilter.java        |   6 +-
 .../util/TestFilesystemResourceLoader.java      |  28 +-
 .../index/TestBackwardsCompatibility.java       |   8 -
 .../benchmark/byTask/TestPerfTasksLogic.java    |   2 -
 .../benchmark/byTask/utils/StreamUtilsTest.java |  14 +-
 .../codecs/lucene50/Lucene50CompoundReader.java |  14 +-
 .../apache/lucene/index/IndexFileDeleter.java   | 137 +--
 .../org/apache/lucene/index/IndexWriter.java    |  14 +-
 .../index/PersistentSnapshotDeletionPolicy.java |   2 +-
 .../java/org/apache/lucene/store/Directory.java |   3 +-
 .../org/apache/lucene/store/FSDirectory.java    | 147 ++-
 .../lucene/store/FileSwitchDirectory.java       |   6 +-
 .../org/apache/lucene/store/MMapDirectory.java  |   1 +
 .../org/apache/lucene/store/NIOFSDirectory.java |   2 +-
 .../lucene/store/NRTCachingDirectory.java       |   2 -
 .../org/apache/lucene/store/RAMDirectory.java   |   9 +-
 .../apache/lucene/store/SimpleFSDirectory.java  |   1 +
 .../java/org/apache/lucene/util/IOUtils.java    |   8 +-
 .../lucene/util/bkd/OfflinePointWriter.java     |   1 -
 .../org/apache/lucene/index/TestAddIndexes.java |   2 -
 .../index/TestAllFilesCheckIndexHeader.java     |   5 -
 .../index/TestAllFilesDetectTruncation.java     |   5 -
 .../apache/lucene/index/TestAtomicUpdate.java   |   1 -
 .../index/TestBinaryDocValuesUpdates.java       |   4 -
 .../lucene/index/TestCodecHoldsOpenFiles.java   |  12 +-
 .../index/TestConcurrentMergeScheduler.java     |   4 -
 .../apache/lucene/index/TestDeletionPolicy.java |  34 +-
 .../index/TestDemoParallelLeafReader.java       |   8 +-
 .../lucene/index/TestDirectoryReader.java       | 384 ++++----
 .../lucene/index/TestDirectoryReaderReopen.java |  10 +-
 .../test/org/apache/lucene/index/TestDoc.java   |   8 +-
 .../apache/lucene/index/TestFieldsReader.java   |  65 +-
 .../lucene/index/TestIndexFileDeleter.java      |  31 +-
 .../apache/lucene/index/TestIndexWriter.java    | 947 +++++++++----------
 .../lucene/index/TestIndexWriterCommit.java     |  20 -
 .../lucene/index/TestIndexWriterDelete.java     |   9 +-
 .../index/TestIndexWriterDeleteByQuery.java     |   2 +-
 .../lucene/index/TestIndexWriterExceptions.java |  15 +-
 .../lucene/index/TestIndexWriterForceMerge.java |   1 -
 .../lucene/index/TestIndexWriterFromReader.java |  17 +-
 .../lucene/index/TestIndexWriterOnDiskFull.java |   1 -
 .../TestIndexWriterOutOfFileDescriptors.java    |   2 +
 .../lucene/index/TestNRTReaderCleanup.java      |  10 +-
 .../apache/lucene/index/TestNeverDelete.java    |   8 -
 .../index/TestNumericDocValuesUpdates.java      |   4 -
 .../apache/lucene/index/TestOmitPositions.java  |   5 +-
 .../org/apache/lucene/index/TestOmitTf.java     |   5 +-
 .../TestPersistentSnapshotDeletionPolicy.java   |   2 +-
 .../apache/lucene/index/TestRollingUpdates.java |   4 -
 .../index/TestSnapshotDeletionPolicy.java       |  22 +-
 .../apache/lucene/index/TestStressIndexing.java |   2 +-
 .../lucene/index/TestStressIndexing2.java       |   6 +-
 .../org/apache/lucene/index/TestStressNRT.java  |   2 +-
 .../lucene/index/TestSwappedIndexFiles.java     |   9 -
 .../apache/lucene/search/TestPointQueries.java  |  41 +-
 .../lucene/store/TestBufferedIndexInput.java    |  86 +-
 .../org/apache/lucene/store/TestDirectory.java  |  19 +-
 .../lucene/store/TestFileSwitchDirectory.java   |   1 -
 .../lucene/store/TestNativeFSLockFactory.java   |  21 +-
 .../apache/lucene/store/TestRAMDirectory.java   |   1 -
 .../lucene/store/TestSimpleFSLockFactory.java   |   1 +
 .../org/apache/lucene/util/TestIOUtils.java     |   1 +
 .../apache/lucene/util/TestOfflineSorter.java   |  28 +-
 .../org/apache/lucene/util/bkd/TestBKD.java     |   6 -
 .../org/apache/lucene/util/fst/Test2BFST.java   |   1 +
 .../org/apache/lucene/util/fst/TestFSTs.java    |   3 +-
 .../taxonomy/directory/TestAddTaxonomy.java     |  17 +-
 .../writercache/TestCompactLabelToOrdinal.java  |   4 +-
 .../org/apache/lucene/store/RAFDirectory.java   |   1 +
 .../apache/lucene/util/fst/TestFSTsMisc.java    |   1 -
 .../IndexAndTaxonomyReplicationClientTest.java  |  17 -
 .../IndexAndTaxonomyRevisionTest.java           |   8 -
 .../lucene/replicator/IndexRevisionTest.java    |   4 -
 .../lucene/replicator/LocalReplicatorTest.java  |  90 +-
 .../replicator/http/HttpReplicatorTest.java     |   5 -
 .../spatial/util/BaseGeoPointTestCase.java      |   9 -
 .../org/apache/lucene/geo3d/TestGeo3DPoint.java |  11 +-
 .../analyzing/AnalyzingInfixSuggester.java      |   3 +-
 .../search/suggest/fst/ExternalRefSorter.java   |   1 +
 .../lucene/search/suggest/PersistenceTest.java  |   6 +-
 .../search/suggest/TestInputIterator.java       |   6 +-
 .../analyzing/AnalyzingSuggesterTest.java       |   6 +-
 .../suggest/analyzing/FuzzySuggesterTest.java   |   6 +-
 .../search/suggest/fst/BytesRefSortersTest.java |   3 -
 .../search/suggest/fst/FSTCompletionTest.java   |   6 +-
 .../search/suggest/fst/WFSTCompletionTest.java  |   6 +-
 .../lucene/analysis/VocabularyAssert.java       |   2 -
 .../index/BaseCompoundFormatTestCase.java       |   5 -
 .../index/BaseIndexFileFormatTestCase.java      |  11 +-
 .../lucene/index/BasePointFormatTestCase.java   |  11 -
 .../index/BasePostingsFormatTestCase.java       |   2 -
 .../lucene/index/RandomPostingsTester.java      |   1 -
 .../ThreadedIndexingAndSearchingTestCase.java   |   1 -
 .../apache/lucene/mockfile/VirusCheckingFS.java |  80 ++
 .../lucene/store/BaseDirectoryTestCase.java     |  74 +-
 .../lucene/store/BaseLockFactoryTestCase.java   |  46 +-
 .../lucene/store/MockDirectoryWrapper.java      | 203 +---
 .../org/apache/lucene/util/LuceneTestCase.java  |  34 +-
 .../util/TestRuleTemporaryFilesCleanup.java     |   2 +
 .../java/org/apache/lucene/util/TestUtil.java   | 102 +-
 .../TestCompressingStoredFieldsFormat.java      |   4 -
 .../lucene/mockfile/TestVirusCheckingFS.java    |  62 ++
 .../apache/lucene/mockfile/TestWindowsFS.java   |   2 +-
 .../lucene/store/TestMockDirectoryWrapper.java  |   1 -
 .../org/apache/solr/handler/RestoreCore.java    |   1 -
 .../solr/store/blockcache/BlockDirectory.java   |   2 +-
 .../apache/solr/core/MockDirectoryFactory.java  |   3 -
 .../solr/core/MockFSDirectoryFactory.java       |   1 -
 113 files changed, 1536 insertions(+), 1631 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8e784699/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --cc lucene/CHANGES.txt
index 5ec505f,df6723e..c2aa772
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@@ -110,6 -110,6 +110,11 @@@ Changes in Runtime Behavio
    and codec components are no longer allowed to use this extension
    (Robert Muir, Mike McCandless)
  
++* LUCENE-6835: Directory.listAll now returns entries in sorted order,
++  to not leak platform-specific behavior, and "retrying file deletion"
++  is now the responsibility of Directory.deleteFile, not the caller.
++  (Robert Muir, Mike McCandless)
++
  Tests
  
  * LUCENE-7009: Add expectThrows utility to LuceneTestCase. This uses a lambda

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8e784699/lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java
----------------------------------------------------------------------
diff --cc lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java
index 790c73d,0000000..aa1da81
mode 100644,000000..100644
--- a/lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java
+++ b/lucene/spatial/src/test/org/apache/lucene/spatial/util/BaseGeoPointTestCase.java
@@@ -1,769 -1,0 +1,760 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements.  See the NOTICE file distributed with
 + * this work for additional information regarding copyright ownership.
 + * The ASF licenses this file to You under the Apache License, Version 2.0
 + * (the "License"); you may not use this file except in compliance with
 + * the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.lucene.spatial.util;
 +
 +import java.io.IOException;
 +import java.text.DecimalFormat;
 +import java.text.DecimalFormatSymbols;
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.HashSet;
 +import java.util.List;
 +import java.util.Locale;
 +import java.util.Set;
 +import java.util.concurrent.CountDownLatch;
 +import java.util.concurrent.atomic.AtomicBoolean;
 +
 +import org.apache.lucene.document.Document;
 +import org.apache.lucene.document.Field;
 +import org.apache.lucene.document.NumericDocValuesField;
 +import org.apache.lucene.index.DirectoryReader;
 +import org.apache.lucene.index.IndexReader;
 +import org.apache.lucene.index.IndexWriter;
 +import org.apache.lucene.index.IndexWriterConfig;
 +import org.apache.lucene.index.LeafReaderContext;
 +import org.apache.lucene.index.MultiDocValues;
 +import org.apache.lucene.index.NumericDocValues;
 +import org.apache.lucene.index.RandomIndexWriter;
 +import org.apache.lucene.index.Term;
 +import org.apache.lucene.search.IndexSearcher;
 +import org.apache.lucene.search.Query;
 +import org.apache.lucene.search.SimpleCollector;
 +import org.apache.lucene.store.Directory;
 +import org.apache.lucene.store.MockDirectoryWrapper;
 +import org.apache.lucene.util.FixedBitSet;
 +import org.apache.lucene.util.IOUtils;
 +import org.apache.lucene.util.LuceneTestCase;
 +import org.apache.lucene.util.SloppyMath;
 +import org.apache.lucene.util.TestUtil;
 +import org.junit.BeforeClass;
 +
 +// TODO: cutover TestGeoUtils too?
 +
 +public abstract class BaseGeoPointTestCase extends LuceneTestCase {
 +
 +  protected static final String FIELD_NAME = "point";
 +
 +  private static final double LON_SCALE = (0x1L<< GeoEncodingUtils.BITS)/360.0D;
 +  private static final double LAT_SCALE = (0x1L<< GeoEncodingUtils.BITS)/180.0D;
 +
 +  private static double originLat;
 +  private static double originLon;
 +  private static double lonRange;
 +  private static double latRange;
 +
 +  @BeforeClass
 +  public static void beforeClassBase() throws Exception {
 +    // Between 1.0 and 3.0:
 +    lonRange = 2 * (random().nextDouble() + 0.5);
 +    latRange = 2 * (random().nextDouble() + 0.5);
 +
 +    originLon = GeoUtils.normalizeLon(GeoUtils.MIN_LON_INCL + lonRange + (GeoUtils.MAX_LON_INCL - GeoUtils.MIN_LON_INCL - 2 * lonRange) * random().nextDouble());
 +    originLat = GeoUtils.normalizeLat(GeoUtils.MIN_LAT_INCL + latRange + (GeoUtils.MAX_LAT_INCL - GeoUtils.MIN_LAT_INCL - 2 * latRange) * random().nextDouble());
 +  }
 +
 +  /** Return true when testing on a non-small region may be too slow (GeoPoint*Query) */
 +  protected boolean forceSmall() {
 +    return false;
 +  }
 +
 +  // A particularly tricky adversary for BKD tree:
 +  public void testSamePointManyTimes() throws Exception {
 +
 +    // For GeoPointQuery, only run this test nightly:
 +    assumeTrue("GeoPoint*Query is too slow otherwise", TEST_NIGHTLY || forceSmall() == false);
 +
 +    int numPoints = atLeast(1000);
 +    boolean small = random().nextBoolean();
 +
 +    // Every doc has 2 points:
 +    double theLat = randomLat(small);
 +    double theLon = randomLon(small);
 +
 +    double[] lats = new double[numPoints];
 +    Arrays.fill(lats, theLat);
 +
 +    double[] lons = new double[numPoints];
 +    Arrays.fill(lons, theLon);
 +
 +    verify(small, lats, lons);
 +  }
 +
 +  public void testAllLatEqual() throws Exception {
 +
 +    // For GeoPointQuery, only run this test nightly:
 +    assumeTrue("GeoPoint*Query is too slow otherwise", TEST_NIGHTLY || forceSmall() == false);
 +
 +    int numPoints = atLeast(10000);
 +    boolean small = forceSmall() || random().nextBoolean();
 +    double lat = randomLat(small);
 +    double[] lats = new double[numPoints];
 +    double[] lons = new double[numPoints];
 +
 +    boolean haveRealDoc = false;
 +
 +    for(int docID=0;docID<numPoints;docID++) {
 +      int x = random().nextInt(20);
 +      if (x == 17) {
 +        // Some docs don't have a point:
 +        lats[docID] = Double.NaN;
 +        if (VERBOSE) {
 +          System.out.println("  doc=" + docID + " is missing");
 +        }
 +        continue;
 +      }
 +
 +      if (docID > 0 && x == 14 && haveRealDoc) {
 +        int oldDocID;
 +        while (true) {
 +          oldDocID = random().nextInt(docID);
 +          if (Double.isNaN(lats[oldDocID]) == false) {
 +            break;
 +          }
 +        }
 +            
 +        // Fully identical point:
 +        lons[docID] = lons[oldDocID];
 +        if (VERBOSE) {
 +          System.out.println("  doc=" + docID + " lat=" + lat + " lon=" + lons[docID] + " (same lat/lon as doc=" + oldDocID + ")");
 +        }
 +      } else {
 +        lons[docID] = randomLon(small);
 +        haveRealDoc = true;
 +        if (VERBOSE) {
 +          System.out.println("  doc=" + docID + " lat=" + lat + " lon=" + lons[docID]);
 +        }
 +      }
 +      lats[docID] = lat;
 +    }
 +
 +    verify(small, lats, lons);
 +  }
 +
 +  public void testAllLonEqual() throws Exception {
 +
 +    // For GeoPointQuery, only run this test nightly:
 +    assumeTrue("GeoPoint*Query is too slow otherwise", TEST_NIGHTLY || forceSmall() == false);
 +
 +    int numPoints = atLeast(10000);
 +    boolean small = forceSmall() || random().nextBoolean();
 +    double theLon = randomLon(small);
 +    double[] lats = new double[numPoints];
 +    double[] lons = new double[numPoints];
 +
 +    boolean haveRealDoc = false;
 +
 +    //System.out.println("theLon=" + theLon);
 +
 +    for(int docID=0;docID<numPoints;docID++) {
 +      int x = random().nextInt(20);
 +      if (x == 17) {
 +        // Some docs don't have a point:
 +        lats[docID] = Double.NaN;
 +        if (VERBOSE) {
 +          System.out.println("  doc=" + docID + " is missing");
 +        }
 +        continue;
 +      }
 +
 +      if (docID > 0 && x == 14 && haveRealDoc) {
 +        int oldDocID;
 +        while (true) {
 +          oldDocID = random().nextInt(docID);
 +          if (Double.isNaN(lats[oldDocID]) == false) {
 +            break;
 +          }
 +        }
 +            
 +        // Fully identical point:
 +        lats[docID] = lats[oldDocID];
 +        if (VERBOSE) {
 +          System.out.println("  doc=" + docID + " lat=" + lats[docID] + " lon=" + theLon + " (same lat/lon as doc=" + oldDocID + ")");
 +        }
 +      } else {
 +        lats[docID] = randomLat(small);
 +        haveRealDoc = true;
 +        if (VERBOSE) {
 +          System.out.println("  doc=" + docID + " lat=" + lats[docID] + " lon=" + theLon);
 +        }
 +      }
 +      lons[docID] = theLon;
 +    }
 +
 +    verify(small, lats, lons);
 +  }
 +
 +  public void testMultiValued() throws Exception {
 +
 +    // For GeoPointQuery, only run this test nightly:
 +    assumeTrue("GeoPoint*Query is too slow otherwise", TEST_NIGHTLY || forceSmall() == false);
 +
 +    int numPoints = atLeast(10000);
 +    // Every doc has 2 points:
 +    double[] lats = new double[2*numPoints];
 +    double[] lons = new double[2*numPoints];
 +    Directory dir = newDirectory();
-     noVirusChecker(dir);
 +    IndexWriterConfig iwc = newIndexWriterConfig();
 +    initIndexWriterConfig(FIELD_NAME, iwc);
 +
 +    // We rely on docID order:
 +    iwc.setMergePolicy(newLogMergePolicy());
 +    RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
 +
 +    boolean small = random().nextBoolean();
 +
 +    for (int id=0;id<numPoints;id++) {
 +      Document doc = new Document();
 +      lats[2*id] = randomLat(small);
 +      lons[2*id] = randomLon(small);
 +      doc.add(newStringField("id", ""+id, Field.Store.YES));
 +      addPointToDoc(FIELD_NAME, doc, lats[2*id], lons[2*id]);
 +      lats[2*id+1] = randomLat(small);
 +      lons[2*id+1] = randomLon(small);
 +      addPointToDoc(FIELD_NAME, doc, lats[2*id+1], lons[2*id+1]);
 +
 +      if (VERBOSE) {
 +        System.out.println("id=" + id);
 +        System.out.println("  lat=" + lats[2*id] + " lon=" + lons[2*id]);
 +        System.out.println("  lat=" + lats[2*id+1] + " lon=" + lons[2*id+1]);
 +      }
 +      w.addDocument(doc);
 +    }
 +
 +    // TODO: share w/ verify; just need parallel array of the expected ids
 +    if (random().nextBoolean()) {
 +      w.forceMerge(1);
 +    }
 +    IndexReader r = w.getReader();
 +    w.close();
 +
 +    // We can't wrap with "exotic" readers because the BKD query must see the BKDDVFormat:
 +    IndexSearcher s = newSearcher(r, false);
 +
 +    int iters = atLeast(75);
 +    for (int iter=0;iter<iters;iter++) {
 +      GeoRect rect = randomRect(small, small == false);
 +
 +      if (VERBOSE) {
 +        System.out.println("\nTEST: iter=" + iter + " rect=" + rect);
 +      }
 +
 +      Query query = newRectQuery(FIELD_NAME, rect);
 +
 +      final FixedBitSet hits = new FixedBitSet(r.maxDoc());
 +      s.search(query, new SimpleCollector() {
 +
 +          private int docBase;
 +
 +          @Override
 +          public boolean needsScores() {
 +            return false;
 +          }
 +
 +          @Override
 +          protected void doSetNextReader(LeafReaderContext context) throws IOException {
 +            docBase = context.docBase;
 +          }
 +
 +          @Override
 +          public void collect(int doc) {
 +            hits.set(docBase+doc);
 +          }
 +        });
 +
 +      boolean fail = false;
 +
 +      for(int docID=0;docID<lats.length/2;docID++) {
 +        double latDoc1 = lats[2*docID];
 +        double lonDoc1 = lons[2*docID];
 +        double latDoc2 = lats[2*docID+1];
 +        double lonDoc2 = lons[2*docID+1];
 +        
 +        Boolean result1 = rectContainsPoint(rect, latDoc1, lonDoc1);
 +        if (result1 == null) {
 +          // borderline case: cannot test
 +          continue;
 +        }
 +
 +        Boolean result2 = rectContainsPoint(rect, latDoc2, lonDoc2);
 +        if (result2 == null) {
 +          // borderline case: cannot test
 +          continue;
 +        }
 +
 +        boolean expected = result1 == Boolean.TRUE || result2 == Boolean.TRUE;
 +
 +        if (hits.get(docID) != expected) {
 +          String id = s.doc(docID).get("id");
 +          if (expected) {
 +            System.out.println(Thread.currentThread().getName() + ": id=" + id + " docID=" + docID + " should match but did not");
 +          } else {
 +            System.out.println(Thread.currentThread().getName() + ": id=" + id + " docID=" + docID + " should not match but did");
 +          }
 +          System.out.println("  rect=" + rect);
 +          System.out.println("  lat=" + latDoc1 + " lon=" + lonDoc1 + "\n  lat=" + latDoc2 + " lon=" + lonDoc2);
 +          System.out.println("  result1=" + result1 + " result2=" + result2);
 +          fail = true;
 +        }
 +      }
 +
 +      if (fail) {
 +        fail("some hits were wrong");
 +      }
 +    }
 +    r.close();
 +    dir.close();
 +  }
 +
 +  public void testRandomTiny() throws Exception {
 +    // Make sure single-leaf-node case is OK:
 +    doTestRandom(10);
 +  }
 +
 +  public void testRandomMedium() throws Exception {
 +    doTestRandom(10000);
 +  }
 +
 +  @Nightly
 +  public void testRandomBig() throws Exception {
 +    assumeFalse("Direct codec can OOME on this test", TestUtil.getDocValuesFormat(FIELD_NAME).equals("Direct"));
 +    assumeFalse("Memory codec can OOME on this test", TestUtil.getDocValuesFormat(FIELD_NAME).equals("Memory"));
 +    doTestRandom(200000);
 +  }
 +
 +  private void doTestRandom(int count) throws Exception {
 +
 +    int numPoints = atLeast(count);
 +
 +    if (VERBOSE) {
 +      System.out.println("TEST: numPoints=" + numPoints);
 +    }
 +
 +    double[] lats = new double[numPoints];
 +    double[] lons = new double[numPoints];
 +
 +    boolean small = random().nextBoolean();
 +
 +    boolean haveRealDoc = false;
 +
 +    for (int id=0;id<numPoints;id++) {
 +      int x = random().nextInt(20);
 +      if (x == 17) {
 +        // Some docs don't have a point:
 +        lats[id] = Double.NaN;
 +        if (VERBOSE) {
 +          System.out.println("  id=" + id + " is missing");
 +        }
 +        continue;
 +      }
 +
 +      if (id > 0 && x < 3 && haveRealDoc) {
 +        int oldID;
 +        while (true) {
 +          oldID = random().nextInt(id);
 +          if (Double.isNaN(lats[oldID]) == false) {
 +            break;
 +          }
 +        }
 +            
 +        if (x == 0) {
 +          // Identical lat to old point
 +          lats[id] = lats[oldID];
 +          lons[id] = randomLon(small);
 +          if (VERBOSE) {
 +            System.out.println("  id=" + id + " lat=" + lats[id] + " lon=" + lons[id] + " (same lat as doc=" + oldID + ")");
 +          }
 +        } else if (x == 1) {
 +          // Identical lon to old point
 +          lats[id] = randomLat(small);
 +          lons[id] = lons[oldID];
 +          if (VERBOSE) {
 +            System.out.println("  id=" + id + " lat=" + lats[id] + " lon=" + lons[id] + " (same lon as doc=" + oldID + ")");
 +          }
 +        } else {
 +          assert x == 2;
 +          // Fully identical point:
 +          lats[id] = lats[oldID];
 +          lons[id] = lons[oldID];
 +          if (VERBOSE) {
 +            System.out.println("  id=" + id + " lat=" + lats[id] + " lon=" + lons[id] + " (same lat/lon as doc=" + oldID + ")");
 +          }
 +        }
 +      } else {
 +        lats[id] = randomLat(small);
 +        lons[id] = randomLon(small);
 +        haveRealDoc = true;
 +        if (VERBOSE) {
 +          System.out.println("  id=" + id + " lat=" + lats[id] + " lon=" + lons[id]);
 +        }
 +      }
 +    }
 +
 +    verify(small, lats, lons);
 +  }
 +
 +  public double randomLat(boolean small) {
 +    double result;
 +    if (small) {
 +      result = GeoUtils.normalizeLat(originLat + latRange * (random().nextDouble() - 0.5));
 +    } else {
 +      result = -90 + 180.0 * random().nextDouble();
 +    }
 +    return result;
 +  }
 +
 +  public double randomLon(boolean small) {
 +    double result;
 +    if (small) {
 +      result = GeoUtils.normalizeLon(originLon + lonRange * (random().nextDouble() - 0.5));
 +    } else {
 +      result = -180 + 360.0 * random().nextDouble();
 +    }
 +    return result;
 +  }
 +
 +  protected GeoRect randomRect(boolean small, boolean canCrossDateLine) {
 +    double lat0 = randomLat(small);
 +    double lat1 = randomLat(small);
 +    double lon0 = randomLon(small);
 +    double lon1 = randomLon(small);
 +
 +    if (lat1 < lat0) {
 +      double x = lat0;
 +      lat0 = lat1;
 +      lat1 = x;
 +    }
 +
 +    if (canCrossDateLine == false && lon1 < lon0) {
 +      double x = lon0;
 +      lon0 = lon1;
 +      lon1 = x;
 +    }
 +
 +    return new GeoRect(lon0, lon1, lat0, lat1);
 +  }
 +
 +  protected void initIndexWriterConfig(String field, IndexWriterConfig iwc) {
 +  }
 +
 +  protected abstract void addPointToDoc(String field, Document doc, double lat, double lon);
 +
 +  protected abstract Query newRectQuery(String field, GeoRect bbox);
 +
 +  protected abstract Query newDistanceQuery(String field, double centerLat, double centerLon, double radiusMeters);
 +
 +  protected abstract Query newDistanceRangeQuery(String field, double centerLat, double centerLon, double minRadiusMeters, double radiusMeters);
 +
 +  protected abstract Query newPolygonQuery(String field, double[] lats, double[] lons);
 +
 +  /** Returns null if it's borderline case */
 +  protected abstract Boolean rectContainsPoint(GeoRect rect, double pointLat, double pointLon);
 +
 +  /** Returns null if it's borderline case */
 +  protected abstract Boolean polyRectContainsPoint(GeoRect rect, double pointLat, double pointLon);
 +
 +  /** Returns null if it's borderline case */
 +  protected abstract Boolean circleContainsPoint(double centerLat, double centerLon, double radiusMeters, double pointLat, double pointLon);
 +
 +  protected abstract Boolean distanceRangeContainsPoint(double centerLat, double centerLon, double minRadiusMeters, double radiusMeters, double pointLat, double pointLon);
 +
 +  private static abstract class VerifyHits {
 +
 +    public void test(AtomicBoolean failed, boolean small, IndexSearcher s, NumericDocValues docIDToID, Set<Integer> deleted, Query query, double[] lats, double[] lons) throws Exception {
 +      int maxDoc = s.getIndexReader().maxDoc();
 +      final FixedBitSet hits = new FixedBitSet(maxDoc);
 +      s.search(query, new SimpleCollector() {
 +
 +          private int docBase;
 +
 +          @Override
 +          public boolean needsScores() {
 +            return false;
 +          }
 +
 +          @Override
 +          protected void doSetNextReader(LeafReaderContext context) throws IOException {
 +            docBase = context.docBase;
 +          }
 +
 +          @Override
 +          public void collect(int doc) {
 +            hits.set(docBase+doc);
 +          }
 +        });
 +
 +      boolean fail = false;
 +
 +      for(int docID=0;docID<maxDoc;docID++) {
 +        int id = (int) docIDToID.get(docID);
 +        Boolean expected;
 +        if (deleted.contains(id)) {
 +          expected = false;
 +        } else if (Double.isNaN(lats[id])) {
 +          expected = false;
 +        } else {
 +          expected = shouldMatch(lats[id], lons[id]);
 +        }
 +
 +        // null means it's a borderline case which is allowed to be wrong:
 +        if (expected != null && hits.get(docID) != expected) {
 +          if (expected) {
 +            System.out.println(Thread.currentThread().getName() + ": id=" + id + " should match but did not");
 +          } else {
 +            System.out.println(Thread.currentThread().getName() + ": id=" + id + " should not match but did");
 +          }
 +          System.out.println("  small=" + small + " query=" + query +
 +                             " docID=" + docID + "\n  lat=" + lats[id] + " lon=" + lons[id] +
 +                             "\n  deleted?=" + deleted.contains(id));
 +          if (Double.isNaN(lats[id]) == false) {
 +            describe(docID, lats[id], lons[id]);
 +          }
 +          fail = true;
 +        }
 +      }
 +
 +      if (fail) {
 +        failed.set(true);
 +        fail("some hits were wrong");
 +      }
 +    }
 +
 +    /** Return true if we definitely should match, false if we definitely
 +     *  should not match, and null if it's a borderline case which might
 +     *  go either way. */
 +    protected abstract Boolean shouldMatch(double lat, double lon);
 +
 +    protected abstract void describe(int docID, double lat, double lon);
 +  }
 +
 +  protected void verify(boolean small, double[] lats, double[] lons) throws Exception {
 +    IndexWriterConfig iwc = newIndexWriterConfig();
 +    // Else we can get O(N^2) merging:
 +    int mbd = iwc.getMaxBufferedDocs();
 +    if (mbd != -1 && mbd < lats.length/100) {
 +      iwc.setMaxBufferedDocs(lats.length/100);
 +    }
 +    Directory dir;
 +    if (lats.length > 100000) {
 +      dir = newFSDirectory(createTempDir(getClass().getSimpleName()));
 +    } else {
 +      dir = newDirectory();
 +    }
-     noVirusChecker(dir);
 +
 +    Set<Integer> deleted = new HashSet<>();
 +    // RandomIndexWriter is too slow here:
 +    IndexWriter w = new IndexWriter(dir, iwc);
 +    for(int id=0;id<lats.length;id++) {
 +      Document doc = new Document();
 +      doc.add(newStringField("id", ""+id, Field.Store.NO));
 +      doc.add(new NumericDocValuesField("id", id));
 +      if (Double.isNaN(lats[id]) == false) {
 +        addPointToDoc(FIELD_NAME, doc, lats[id], lons[id]);
 +      }
 +      w.addDocument(doc);
 +      if (id > 0 && random().nextInt(100) == 42) {
 +        int idToDelete = random().nextInt(id);
 +        w.deleteDocuments(new Term("id", ""+idToDelete));
 +        deleted.add(idToDelete);
 +        if (VERBOSE) {
 +          System.out.println("  delete id=" + idToDelete);
 +        }
 +      }
 +    }
 +
 +    if (random().nextBoolean()) {
 +      w.forceMerge(1);
 +    }
 +    final IndexReader r = DirectoryReader.open(w);
 +    w.close();
 +
 +    // We can't wrap with "exotic" readers because the BKD query must see the BKDDVFormat:
 +    IndexSearcher s = newSearcher(r, false);
 +
 +    // Make sure queries are thread safe:
 +    int numThreads = TestUtil.nextInt(random(), 2, 5);
 +
 +    List<Thread> threads = new ArrayList<>();
 +    final int iters = atLeast(75);
 +
 +    final CountDownLatch startingGun = new CountDownLatch(1);
 +    final AtomicBoolean failed = new AtomicBoolean();
 +
 +    for(int i=0;i<numThreads;i++) {
 +      Thread thread = new Thread() {
 +          @Override
 +          public void run() {
 +            try {
 +              _run();
 +            } catch (Exception e) {
 +              failed.set(true);
 +              throw new RuntimeException(e);
 +            }
 +          }
 +
 +          private void _run() throws Exception {
 +            startingGun.await();
 +
 +            NumericDocValues docIDToID = MultiDocValues.getNumericValues(r, "id");
 +
 +            for (int iter=0;iter<iters && failed.get() == false;iter++) {
 +
 +              if (VERBOSE) {
 +                System.out.println("\nTEST: iter=" + iter + " s=" + s);
 +              }
 +              Query query;
 +              VerifyHits verifyHits;
 +
 +              if (random().nextBoolean()) {
 +                // Rect: don't allow dateline crossing when testing small:
 +                final GeoRect rect = randomRect(small, small == false);
 +
 +                query = newRectQuery(FIELD_NAME, rect);
 +
 +                verifyHits = new VerifyHits() {
 +                    @Override
 +                    protected Boolean shouldMatch(double pointLat, double pointLon) {
 +                      return rectContainsPoint(rect, pointLat, pointLon);
 +                    }
 +                    @Override
 +                    protected void describe(int docID, double lat, double lon) {
 +                    }
 +                  };
 +
 +              } else if (random().nextBoolean()) {
 +                // Distance
 +                final boolean rangeQuery = random().nextBoolean();
 +                final double centerLat = randomLat(small);
 +                final double centerLon = randomLon(small);
 +
 +                double radiusMeters;
 +                double minRadiusMeters;
 +
 +                if (small) {
 +                  // Approx 3 degrees lon at the equator:
 +                  radiusMeters = random().nextDouble() * 333000 + 1.0;
 +                } else {
 +                  // So the query can cover at most 50% of the earth's surface:
 +                  radiusMeters = random().nextDouble() * GeoProjectionUtils.SEMIMAJOR_AXIS * Math.PI / 2.0 + 1.0;
 +                }
 +
 +                // generate a random minimum radius between 1% and 95% the max radius
 +                minRadiusMeters = (0.01 + 0.94 * random().nextDouble()) * radiusMeters;
 +
 +                if (VERBOSE) {
 +                  final DecimalFormat df = new DecimalFormat("#,###.00", DecimalFormatSymbols.getInstance(Locale.ENGLISH));
 +                  System.out.println("  radiusMeters = " + df.format(radiusMeters)
 +                      + ((rangeQuery == true) ? " minRadiusMeters = " + df.format(minRadiusMeters) : ""));
 +                }
 +
 +                try {
 +                  if (rangeQuery == true) {
 +                    query = newDistanceRangeQuery(FIELD_NAME, centerLat, centerLon, minRadiusMeters, radiusMeters);
 +                  } else {
 +                    query = newDistanceQuery(FIELD_NAME, centerLat, centerLon, radiusMeters);
 +                  }
 +                } catch (IllegalArgumentException e) {
 +                  if (e.getMessage().contains("exceeds maxRadius")) {
 +                    continue;
 +                  }
 +                  throw e;
 +                }
 +
 +                verifyHits = new VerifyHits() {
 +                    @Override
 +                    protected Boolean shouldMatch(double pointLat, double pointLon) {
 +                      if (rangeQuery == false) {
 +                        return circleContainsPoint(centerLat, centerLon, radiusMeters, pointLat, pointLon);
 +                      } else {
 +                        return distanceRangeContainsPoint(centerLat, centerLon, minRadiusMeters, radiusMeters, pointLat, pointLon);
 +                      }
 +                    }
 +
 +                    @Override
 +                    protected void describe(int docID, double pointLat, double pointLon) {
 +                      double distanceKM = SloppyMath.haversin(centerLat, centerLon, pointLat, pointLon);
 +                      System.out.println("  docID=" + docID + " centerLon=" + centerLon + " centerLat=" + centerLat
 +                          + " pointLon=" + pointLon + " pointLat=" + pointLat + " distanceMeters=" + (distanceKM * 1000)
 +                          + " vs" + ((rangeQuery == true) ? " minRadiusMeters=" + minRadiusMeters : "") + " radiusMeters=" + radiusMeters);
 +                    }
 +                   };
 +
 +              // TODO: get poly query working with dateline crossing too (how?)!
 +              } else {
 +
 +                // TODO: poly query can't handle dateline crossing yet:
 +                final GeoRect bbox = randomRect(small, false);
 +
 +                // Polygon
 +                double[] lats = new double[5];
 +                double[] lons = new double[5];
 +                lats[0] = bbox.minLat;
 +                lons[0] = bbox.minLon;
 +                lats[1] = bbox.maxLat;
 +                lons[1] = bbox.minLon;
 +                lats[2] = bbox.maxLat;
 +                lons[2] = bbox.maxLon;
 +                lats[3] = bbox.minLat;
 +                lons[3] = bbox.maxLon;
 +                lats[4] = bbox.minLat;
 +                lons[4] = bbox.minLon;
 +                query = newPolygonQuery(FIELD_NAME, lats, lons);
 +
 +                verifyHits = new VerifyHits() {
 +                    @Override
 +                    protected Boolean shouldMatch(double pointLat, double pointLon) {
 +                      return polyRectContainsPoint(bbox, pointLat, pointLon);
 +                    }
 +
 +                    @Override
 +                    protected void describe(int docID, double lat, double lon) {
 +                    }
 +                  };
 +              }
 +
 +              if (query != null) {
 +
 +                if (VERBOSE) {
 +                  System.out.println("  query=" + query);
 +                }
 +
 +                verifyHits.test(failed, small, s, docIDToID, deleted, query, lats, lons);
 +              }
 +            }
 +          }
 +      };
 +      thread.setName("T" + i);
 +      thread.start();
 +      threads.add(thread);
 +    }
 +    startingGun.countDown();
 +    for(Thread thread : threads) {
 +      thread.join();
 +    }
 +    IOUtils.close(r, dir);
 +    assertFalse(failed.get());
 +  }
- 
-   protected Directory noVirusChecker(Directory dir) {
-     if (dir instanceof MockDirectoryWrapper) {
-       ((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
-     }
-     return dir;
-   }
 +}