Posted to commits@lucene.apache.org by so...@apache.org on 2020/06/03 19:13:29 UTC

[lucene-solr] branch jira/lucene-8962 updated (3205782 -> 107fc17)

This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a change to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git.
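
To inspect this branch locally, the repository URL and branch name above can be used directly with git (a minimal sketch; it assumes git is installed and that the gitbox remote is reachable):

    git clone https://gitbox.apache.org/repos/asf/lucene-solr.git
    cd lucene-solr
    git checkout jira/lucene-8962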


    from 3205782  LUCENE-9259: Fix wrong NGramFilterFactory argument name for preserveOriginal option
     add 4501b3d  Revert "LUCENE-8962: Split test case (#1313)"
     add 01688cd8 SOLR-14073: Fix segment look ahead NPE in CollapsingQParserPlugin
     add 7b9f212  LUCENE-9268: Add some random tests to IndexWriter
     add c7cf9e8  LUCENE-9254: UniformSplit supports FST off-heap.
     add 03c2557  LUCENE-9263: Fix wrong transformation of distance in meters to radians in Geo3DPoint (#1318)
     add c8dea5d  LUCENE-9259: Fix wrong NGramFilterFactory argument name for preserveOriginal option
     add 44bdfb2  Consolidated process event logic after CRUD action (#1325)
     add e6616ba  SOLR-14073: Update CHANGES.txt
     add 8a90806  move entry in CHANGES.txt from 8.6 to 8.5
     add 193e4a6  SOLR-14139: Update CHANGE.txt
     add 354f07c  Remove unused scripts in dev-tools folder (#1326)
     add 79feb93  LUCENE-9164: process all events before closing gracefully (#1319)
     add 751fbec  Remove some unused lines from addBackcompatIndexes.py related to svn (#1322)
     add 9842744  Add 8.6 section to solr CHANGES.txt (#1337)
     add 732348e  SOLR-14197: SolrResourceLoader refactorings to reduce API * Remove SRL.listConfigDir (unused) * Remove SRL.getDataDir * Remove SRL.getCoreName * Remove SRL.getCoreProperties  XmlConfigFile needs to be passed in the substitutableProperties  IndexSchema needs to be passed in the substitutableProperties  Remove redundant Properties from CoreContainer constructors * Remove SRL.newAdminHandlerInstance (unused) * Remove SRL.openSchema and openConfig * Avoid SRL.getConfigDir  A [...]
     add d4a137d  LUCENE-9242: generate javadocs by calling Ant javadoc task (#1304)
     add 32a2076  LUCENE-9229: fix Ref Guide broken links
     add ed59c3e  LUCENE-9272: Move checksum verification of the `.tip` file to `checkIntegrity()`. (#1339)
     add f0a4973  LUCENE-9270: Update Javadoc about normalizeEntry in the Kuromoji DictionaryBuilder
     add e43f857  LUCENE-9272: Add a CHANGES entry.
     add 5286098  LUCENE-8849: DocValuesRewriteMethod.visit should visit subquery
     add b1ec1cd  LUCENE-9258: DocTermsIndexDocValues' range scorer didn't support multi-valued fields
     add 26a32d7  Changes to release scripts for 8.5 release
     add 8a940e7  LUCENE-9171: Add CHANGES entry SOLR-12238: Add CHANGES entry
     add daf1498  Correctly pass gpg_key to release script
     add f312ca3  Fix python syntax in smoke tester
     add 232d940  Really fix python syntax in smoke tester
     add 9a8602c  SOLR-14316: Fix unchecked type warning in JavaBinCodec (#1344)
     add c266044  Add LZ4 NOTICE section from lucene to solr NOTICE.txt
     add 0f10b5f  SOLR-13264: IndexSizeTrigger aboveOp / belowOp properties not in valid properties.
     add 74721fa  SOLR-14289 Skip ZkChroot check when not necessary (#1298)
     add c0cf7bb  LUCENE-9276: Use same code-path for updateDocuments and updateDocument (#1346)
     add adb829c  [logging] log actual size of transient core cache
     add cbd0dcb  SOLR-14254: Docs for text tagger: FST50 trade-off (#1332)
     add bd16620  LUCENE-9164: fix changes entry
     add 87b1bdd  LUCENE-8103: Use TwoPhaseIterator in DoubleValuesSource and QueryValueSource Fixes #1343
     add 73b618a  LUCENE-9279: Update dictionary version for Ukrainian analyzer (#1354)
     add 6c1d992  SOLR-14312: SOLR-14296: Upgrade Zookeeper to 3.5.7, Update netty to 4.1.47
     add 261e7ba  LUCENE-8103: Revert QueryValueSource.objectVal change
     add 6ae69d3  Document sort param tiebreak logic (#1349)
     add 1abed9a  Revert "LUCENE-9279: Update dictionary version for Ukrainian analyzer (#1354)"
     add 7fe6f9c  LUCENE-9279: Update dictionary version for Ukrainian analyzer (with corrected checksums).
     add bdb40fb  Cleanup DWPT for readability (#1350)
     add bf25e65  LUCENE-9279: add changes entry and attribution.
     add fda9354  SOLR-8306: Optimize expand.rows=0 to compute only total hits (#1334)
     add a1485ab  SOLR-14338: add missing close list tag in CDCR documentation
     add 2b327e5d SOLR-13199: Fix NPE in ChildDocTransformer when parenFilter is invalid
     add 6a59d44  LUCENE-8908: return def val from objectVal when exists returns false
     add 0b063fd  SOLR-10157: improve error message in case of unknown aggregations
     add 7f37a55  SOLR-13944: remove redundant checks in SpellCheckCollator
     add 4fd96be  SOLR-14256: replaced EMPTY with empty() to fix deadlock
     add 126e4a6  LUCENE-9283: Exclude DelimitedBoostTokenFilter from TestRandomChains
     add e9d6c24  SOLR-11725: use corrected sample formula to calc stdDev in JSON facets
     add 78e670f  SOLR-14012: return long from unique and hll even for standalone
     add e36733d  SOLR-14350: fix test failure due to SOLR-14012
     add 5fd55d7  SOLR-12353: SolrDispatchFilter expensive non-conditional debug line degrades performance
     add ae2eadb  solr-upgrade-notes.adoc: highlighter sizing is different
     add 6296703  ivy settings: local maven repo pattern needs classifier (#1367)
     add aaf08c9  LUCENE-9275: make TestLatLonMultiPolygonShapeQueries more resilient for CONTAINS queries (#1345)
     add 06fd70f  SOLR-14348: split TestJsonFacets to multiple test classes
     add 5630619  SOLR-14343: set initcapacity properly in NamedList
     add 7f460fa  fix typos in subquery doc transformer
     add 68e4304  SOLR-14347: Autoscaling placement wrong when concurrent replica placements are calculated.
     add be5c407  Update RDF files for 8.5.0 release
     add 20abf3e  Add 8.5.0 back-compat indices
     add 674aba6  LUCENE-9287: UsageTrackingQueryCachingPolicy no longer caches DocValuesFieldExistsQuery (#1374)
     add aad814b  SOLR-14340: Remove unnecessary configset verification checks Improves CLUSTERSTATUS times for massive clusters. Closes #1373
     add ad75916  LUCENE-9283: Also exclude DelimitedBoostTokenFilter from TestFactories
     add 2c7a710  LUCENE-9281: Retire SPIClassIterator from master because Java 9+ uses different mechanism to load services when module system is used (#1360)
     add 075adac  remove LUCENE-8962 from CHANGES.txt
     add 8d937c1  SOLR-14274 Do not register multiple sets of JVM metrics (#1299)
     add 4f03ce5  SOLR-14284 add expressible support to list, and add example of removing a component (#1292)
     add b0728ce  SOLR-14128: Improve distributed locking around managed schema upgrade process.
     add 255132f  SOLR-14302: Ensure Solr always includes the stacktrace for exceptions by using '-OmitStackTraceInFastThrow'
     add cd9375a  LUCENE-9266 Update smoke test for gradle
     add ea864b4  SOLR-13659: Remove unused SolrCacheHolder accidental leftover from reverted plugin work
     add a31ecd2  SOLR-14322 Improve AbstractFullDistribZkTestBase.waitForThingsToLevelOut
     add d1601f6  SOLR-14260: SolrJ pluggable ConnectionSocketFactory in HttpClientUtil see SocketFactoryRegistryProvider Fixes #1261
     add a0b0c71  SOLR-14342: Improve core loading order in SolrCloud. Makes collections available sooner and reduces leaderVoteWait timeouts in large SolrCloud clusters. This fixes a previous attempt to do this. Fixes #1366
     add 8cb50a5  LUCENE-9290 Fix TestXYPoint#testEqualsAndHashCode
     add ac866a6  Remove CurrentCoreDescriptorProvider (#1384)
     add 84f6507  LUCENE-9133 Fix for potential NPE in TermFilteredPresearcher#buildQuery
     add 7a83f09  SOLR-13842: remove redundant defaults from implictPlugins
     add 15330a8  SOLR-14329: support choosing expand field from multiple collapse group
     add 132228d  SOLR-14344: remove deprecated HttpSolrClient's Remote*Exception
     add 1a2325a  SOLR-13893: fix typo in BlobRepository's max jar size sys property
     add 9de6811  SOLR-13893: remove deprecated runtme.lib.size sys property
     add 782ded2  SOLR-14317: HttpClusterStateProvider throws exception when only one node down (Closes #1342)
     add 5c2011a  SOLR-14367: Upgrade Tika to 1.24
     add 1ca7067  SOLR-12028: BadApple and AwaitsFix annotations usage, Unannotated tests that haven't failed in a while
     add 46d0116  LUCENE-9170: Use HTTPS when downloading wagon-ssh artifacts
     add 1f5705f  SOLR-14363: Separate /get requests into their own type designation (#1379)
     add 9ed71a6  LUCENE-9074: Slice Allocation Control Plane For Concurrent Searches (#1294)
     add d6cef4f  Update CHANGES.txt
     add f779bc6  SOLR-14307: User defined "<cache/>" entries in solrconfig.xml now support enabled="true|false" just like core searcher caches.
     add 927587d  fix typo (#1302)
     add e25ab42  LUCENE-9266 remove gradle wrapper jar from source
     add e609079  Specify java 11 for gradle wrapper downloader
     add 28dea8d  SOLR-14356: PeerSync should not fail with SocketTimeoutException from hanging nodes
     add ac2837c  SOLR-14378: Factor a FilterFeatureScorer class out from (contrib/ltr) OriginalScoreScorer.
     add b5c5ebe  LUCENE-9300: Fix field infos update on doc values update (#1394)
     add 9b29c2a  Support running gradlew from subdir
     add 234e783  SOLR-14359: Admin UI collection/core drop-downs had wrong placeholder text (#1400)
     add d749469  SOLR-14359: Remove apache header from MIT licensed js file
     add 7b3980c  SOLR-14364: LTR SolrFeature fq improvements   Mostly general code improvements, though it should support postFilters now Add QueryUtils.combineQueryAndFilter
     add 1aeefc2  SOLR-14351: Fix/improve MDCLoggingContext usage * Some set/clear were not balanced. * Harden clear() in case of imbalance. * Sometimes coreContainger.getCore was called unnecessarily; just need a descriptor * SolrCore.open/close now calls MDCLoggerContext.setCore/clear * no need to clear MDC in HttpSolrCall
     add d32858b  LUCENE-9301: add manifest entries to JARs (gradle build).
     add e1e2085  SOLR-14386: Update Jetty to 9.4.27 and dropwizard-metrics version to 4.1.5
     add e916056  SOLR-14386: Update Jetty to 9.4.27 and dropwizard-metrics version to 4.1.5, fix precommit
     add 9322a7b  SOLR-12067: Remove support for autoReplicaFailoverWaitAfterExpiration
     add 9b6e072  SOLR-12720: Use the right Jira issue in change log
     add f018c4c  LUCENE-9244: In 2D, a point can be shared by four leaves (#1279)
     add f2114b9  SOLR-14210: Include replica health in healtcheck handler (#1387)
     add 82692e7  LUCENE-9271: Move BufferedIndexInput to the ByteBuffer API.
     add 3363e1a  LUCENE-9271: Fix bad assertion.
     add 529042e  LUCENE-9271: Complete fix for setBufferSize.
     add de62339  LUCENE-8050: PerFieldDocValuesFormat should not get the DocValuesFormat on a field that has no doc values. Closes #1408
     add 5bfbdc5  SOLR-14376: optimize SolrIndexSearcher.getDocSet when matches everything * getProcessedFilter now returns null filter if it's all docs more reliably * getProcessedFilter now documented clearly as an internal method * getDocSet detects all-docs and exits early with getLiveDocs * small refactoring to getDocSetBits/makeDocSetBits Closes #1399
     add 013898d  CHANGES.txt: move entry to Optimizations
     add 793a3be  LUCENE-9266: correct windows gradle wrapper download script - wrong placement of the quote.
     add dbb4be1  LUCENE-9310: workaround for IntelliJ gradle import
     add 4f92cd4  LUCENE-9278: Use -linkoffline instead of relative paths to make links to other projects (#1388)
     add 6bba35a  LUCENE-9286: FST.Arc.BitTable reads directly FST bytes. Arc is lightweight again and FSTEnum traversal faster.
     add c7cac57  LUCENE-9077: make git always keep .gradle files with LF EOLs.
     add 2437f3f  LUCENE-9311: detect intellij reimport and modify sourceset to exclude solr-ref-guide/tools (#1422)
     add ffdd29e  Fix typo in SolrRequestHandler's javadocs.
     add adbd714  SOLR-14365: CollapsingQParser - Avoiding always allocate int[] and float[] with size equals to number of unique values (WIP) (#1395)
     add 2935186  LUCENE-9298: Improve RAM accounting in BufferedUpdates when deleted doc IDs and terms are cleared (#1389)
     add e376582  LUCENE-9309: Wait for #addIndexes merges when aborting merges (#1418)
     add 71d335f  SOLR-14365: Automatically grow size of groupHeadValues
     add 36b280b  SOLR-11775: return long val for facet count in json facet
     add 527e651  LUCENE-9298: Fix TestBufferedUpdates
     add d52c102  SOLR-14402: Avoid creating new exceptions for every request made to MDCAwareThreadPoolExecutor by distributed search.
     add 2602269  LUCENE-9304: Refactor DWPTPool to pool DWPT directly (#1397)
     add 8c1f981  LUCENE-9309: ensure stopMerges is set under IW lock
     add fea1ce0  LUCENE-9278: move declaration calling getTemporaryDir inside the execution block closure so that gradlew clean renderJavadoc doesn't wipe out the temporary directory before the task has a chance to run.
     add 9244558  LUCENE-9201: remove javadoc task remnants. Make javadoc depend on renderJavadoc and skip the default gradle's implementation.
     add 7279190  LUCENE-9316: Incorporate all :precommit tasks into :check
     add f865c8a  LUCENE-9077: add a :solr:packaging:dev task that assembles a 'development' image of Solr from which nothing is removed upon consecutive rebuild.
     add 04f4439  SOLR-14396: TaggerRequestHandler should not error on empty index Fixes #1421
     add 4dece1a  CHANGES.txt move SOLR-14396 oops!
     add 13f19f6  SOLR-9906: SolrjNamedThreadFactory is deprecated in favor of SolrNamedThreadFactory. DefaultSolrThreadFactory is removed from solr-core in favor of SolrNamedThreadFactory in solrj package and all solr-core classes now use SolrNamedThreadFactory
     add 6b78330  SOLR-9909: Add the right Jira issue to CHANGES.txt
     add 4df81f1  SOLR-9909: The deprecated SolrjNamedThreadFactory has been removed. Use SolrNamedThreadFactory instead.
     add 3e0f7b1  SOLR-9909: Actually delete SolrjNamedThreadFactory.java
     add 616ec98  Do a bit count on 8 bytes from a long directly instead of reading 8 bytes from the reader. Byte order doesn't matter here. (#1426)
     add f5457b8  Suppress Direct postings for TestIndexWriterThreadsToSegments to prevent OOM on Nightly
     add 3236d38  Avoid using a raw Arc type. (#1429)
     add e15b02c  SOLR-14359: Use correct placeholder-text-single option for anguar-chosen
     add 60545a8  SOLR-14210: Add javadocs and refguide docs
     add ceeb55c  SOLR-14210: Fix precommit for javadocs
     add 18af632  LUCENE-9304: Fix IW#getMaxCompletedSequenceNumber()  (#1427)
     add 47bc184  Move DWPT private deletes out of FrozenBufferedUpdates (#1431)
     add 1fc4a54  Solr: Use QueryUtils.combineQueryAndFilter more  and check MatchAllDocsQuery (minor & cheap optimization)  Closes #1407
     add aa605b3  LUCENE-9307: Remove the ability to set the buffer size dynamically on BufferedIndexInput (#1415)
     add 0aa4ba7  LUCENE-9260: Verify checksums of CFS files. (#1311)
     add d5720d6  add version 8.5.1 to doap files
     add 8a88ab0  Add bugfix version 8.5.1
     add b7b85f3  Move bugfix entries to version 8.5.1
     add 9340e56  Add back-compat indices for 8.5.1
     add b24b028  SOLR-14291: fix regexps to handle dotted fields in Old Analytics params.
     add 74ecc13  SOLR-14387 add testcase for ids with separators to GetByIdTest and fix SolrClient to escape ids properly
     add 243cf2c  LUCENE-9327 - drop useless casts in BaseXYShapeTestCase
     add fc53fd9  SOLR-14411, SOLR-14359 CHANGES entry fixes after 8.5.1 release
     add 03363f4  SOLR-14371 Zk StatusHandler should know about dynamic zk config (#1392)
     add 3af165b  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add 113043b  LUCENE-9324: Add an ID to SegmentCommitInfo (#1434)
     add 9881dc0  Fix compiler warnings in tests
     add 1f1cdbf  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add f5d9139  SOLR-14391: getDocSet(Query[]) can use search(query,collector) Refactoring to simplify SolrIndexSearcher. ScoreFilter interface is obsolete now. Fixed #1409
     add f01c040  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add 89e14fa  SOLR-13886: HDFSSyncSliceTest and SyncSliceTest started failing frequently
     add 37ad0e5  SolrMetricManager.registerMetric trivial opt
     add f914e08  LUCENE-9273: Speed up geometry queries by specialising Component2D spatial operations (#1341)
     add 58f9c79  SOLR-14412 zkRun+https (#1437)
     add c94770c  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add 56c61e6  Remove dead code
     add c9cd623  SOLR-14421: Fix non-working examples in solr.in.cmd
     add e0c06ee  LUCENE-9191: make LineFileDocs random seeking more efficient by recording safe skip points in the concatenated gzip'd chunks
     add 2b6ae53  LUCENE-9337: Ensure CMS updates it's thread accounting datastructures consistently (#1443)
     add fbcb6ce  Configure notifications.
     add 5d60ff4  SOLR-12845: Add a default autoscaling cluster policy.
     add fe05a6d  SOLR-14420 Declare ServletRequests as HttpRequests in AuthenticationPlugin (#1442)
     add 950a34c  SOLR-14412 use parameterized logging
     add 4a98918  LUCENE-9339: Only call MergeScheduler when we actually found new merges (#1445)
     add 3a743ea  SOLR-8998,SOLR-12490: Polishing reference guide.
     add e43b179  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add 2a7ba5a  SOLR-12845: Properly clear default policy between tests.
     add 4eb755d  SOLR-12690: Regularize LoggerFactory declarations. Fixing an incorrect change
     add a11b78e  LUCENE-9342: Collector's totalHitsThreshold should not be lower than numHits (#1448)
     add c7697b0  LUCENE-9344: Convert .txt files to properly formatted .md files (#1449)
     add 75b648c  LUCENE-9344: Use https url for lucene.apache.org
     add 83018de  Ensure we use a sane IWC for tests adding many documents.
     add ed3caab  LUCENE-9338: Clean up type safety in SimpleBindings (#1444)
     add f6462ee  LUCENE-9340: Deprecate SimpleBindings#add(SortField) (#1447)
     add 5eb117f  LUCENE-9340: Remove deprecated SimpleBindings#add(SortField) method
     add d7e0b90  LUCENE-9345: Separate MergeSchedulder from IndexWriter (#1451)
     add 849fb63  Move audit logging docs under AAA section (#1452)
     add 013e983  LUCENE-9267 Replace getQueryBuildTime time unit from ms to ns
     add 8059eea  Consolidate all IW locking inside IndexWriter (#1454)
     add ecc98e8  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add 8867f46  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add ce18505  Fix Typo in `cloud-scripts` path (#1458)
     add f03e6aa  SOLR-14429: Convert .txt files to properly formatted .md files (#1450)
     add 13bbe60  LUCENE-9344: update file names (MIGRATE.txt, BUILD.txt => MIGRATE.md, BUILD.md)
     add 5d5b7e1  LUCENE-9314: Use SingletonDocumentBatch in monitor when we only have a single document
     add 64eed9a  LUCENE-9347: Add support for forbiddenapis 3.0 (#1459)
     add ea46596  SOLR-13942: /api/cluster/zk/* to fetch raw ZK data
     add b25eabe  SOLR-13942: A read API at /api/cluster/zk/* to fetch raw ZK data and view contents of a ZK direcory
     add ff43636  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add bc4da80  Fix visibility on member variables in IndexWriter and friends (#1460)
     add 960610a  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add daeaffa  SOLR-14433: Improve SolrShardReporter default metrics list (#1453)
     add 2d07439  Include time unit in SolrCLI's TimeoutException wording.
     add 0fc5179  SOLR-14412 only set ssl props when ssl enabled
     add f4eb586  SOLR-14173: Ref Guide Redesign: upgrade bootstrap; change layout; consolidate CSS. See issue for list of changes.
     add 2dd92fc  Solr GraphTermsQParser simplifications (#1405)
     add 59a8e83  LUCENE-9089: update FST usage example
     add 267d70b  LUCENE-9349: TermInSetQuery should use consumeMatchingTerms in visit() (#1465)
     add 28e7479  SOLR-14173: Change left nav item highlighting to fix menu jumpiness when hovering/selecting
     add 6e96d01  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add 0c682d0  SOLR-14237: A new panel with security info in admin UI's dashboard
     add 561e366  SOLR-14237: A new panel with security info in admin UI's dashboard
     add 5e6d91e  SOLR-14173: Don't use JQuery-Slim as it breaks the sidebar sub-menu system.
     add 207d240  Fix tests to survive nightly runs with many documents
     add ddd8fa9  update CHANGES for #807
     add 5354f7e  LUCENE-9333: Add gradle task to compile changes.txt to a html (#1468)
     add 26c9fce  LUCENE-9278: concatenate paths for sourcepath using path separator rather than whitespace (which causes invalid option to be passed to javadoc).
     add 9ed5b6a  SOLR-14237: Fix HDFS nightly test failure
     add 9ae05e9  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add 34c9fe4  LUCENE-9331: Make TestIndexWriterDelete#testDeletesOnDiskFull converge faster.
     add 26b0b54  LUCENE-9278: Fix javadocs task to work on windows and with whitespace in project folder (#1476)
     add 217c2fa  LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects
     add 7b289d61 SOLR-14440 Cert Auth plugin (#1463)
     add a5c73d3  Revert "SOLR-14440 CertAuth plugin (#1463)"
     add 242f48a  SOLR-14440 Cert Auth plugin
     add 7a849f6  LUCENE-9354: Sync French stop words with latest version from Snowball. (#1474)
     add 951efc9  LUCENE-9278: Improved options file creation: All parameters are escaped automatically, arguments don't need to be strings (they are converted during building options file) (#1479)
     add 96c47bc  LUCENE-9087: Build always trees with full leaves and lower the default value for maxPointsPerLeafNode to 512
     add e7c7a62  SOLR-14351: Oops; add back null check for ZkController
     add 0c58687  LUCENE-9348: Add a base grouping test for use with different GroupSelector implementations (#1461)
     add 5eea489  SOLR-14431: SegmentsInfoRequestHandler does not release IndexWriter.
     add 9c3b2b6  SOLR-14400: DirectUpdateHandler2 no longer needs to override getSolrMetricsContext
     add b810831  SOLR-14400: SuggestComponent can use parent class' SolrMetricsContext
     add 1783c4a  LUCENE-9191: ensure LineFileDocs random seeking effort does not seek into the middle of a multi-byte UTF-8 encoded Unicode character
     add 6f775bf  SOLR-14014 Allow disabling AdminUI at launch (#1471)
     add 9ee8aa6  LUCENE-9278: Fix passing of Java properties for locale: The arguments must be separated.
     add d4dbd0b  SOLR-14173: Add entry in CHANGES.txt
     add c6d4aea  LUCENE-9350: Don't hold references to large automata on FuzzyQuery (#1467)
     add e286638  LUCENE-9350: Add changes entry
     add 5834992  LUCENE-7822: CodecUtil#checkFooter should throw a CorruptIndexException as the main exception. (#1482)
     add d06294e  LUCENE-9366: Remove unused maxDoc parameter from DocValues.emptySortedNumeric() (#1491)
     add 28e4754  Bugfix for FuzzyQuery false negative (#1493)
     add 31b350e  SOLR-14426 Move auxiliary classes to nested classes (#1487)
     add 4c408a5  LUCENE-9362: Fix rewriting check in ExpressionValueSource (#1485)
     add 726894f  Revert "Bugfix for FuzzyQuery false negative (#1493)" (#1495)
     add 03a6023  SOLR-14465: Solr query handling code catches FuzzyTermsException
     add 30ba8de  LUCENE-9363: Only assert for no merging segments we merges are disabled
     add caa2042  Ensure nightly doesn't cause timeouts
     add 4a76a59  SOLR-14466: Upgrade log4j2 to latest release (2.13.2)
     add d9f9d6d  SOLR-13289: Add Support for BlockMax WAND (#1456)
     add 15be0db  SOLR-11934: Visit Solr logging, it's too noisy.
     add aeb9f6c  SOLR-7880: Update commons-cli to 1.4
     add bd004d2  Fix test to check for close / closing and wait for merge threads to close the writer if concurrency strikes
     add 7c350d2  LUCENE-7889: Allow grouping on Double/LongValuesSource (#1484)
     add 0d20c7b  LUCENE-9358: remove unnecessary tree rotation for the one dimensional case (#1481)
     add a0e158c3 SOLR-14266: Fix or suppress 14 resource leak warnings in apache/solr/core
     add 6971244  SOLR-14463: Solr Admin ZkStatus page now works with ZK 3.6 (#1499)
     add 4680e92  SOLR-14423: Move static SolrClientCache from StreamHandler to CoreContainer for wider reuse and better life-cycle management.
     add adddab9d SOLR-14456: Fix Content-Type header forwarding on compressed requests (#1480)
     add e4dc9e9  SOLR-11934: Visit Solr logging, it's too noisy. (added collection to log messages 'Registered new searcher...'
     add dd4fa8f  SOLR-14423: Additional fixes for object caching and incorrect test assumptions.
     add 1e449e3  SOLR-12131: ExternalRoleRuleBasedAuthorizationPlugin (#341)
     add 329e7c7  LUCENE-9033 Update ReleaseWizard for new website instructions (#1324)
     add 687dd42  SOLR-14475: Fix deprecation warnings resulting from upgrading commons cli to 1.4
     add 08841b6  Mistakenly checked in gradle/defaults-java.gradle with warning limit of 10000
     add 4b9808a  SOLR-14351: commitScheduler was missing MDC logging (#1498)
     add 1efce54  RegEx querying - add support for Java’s predefined character classes like \d for digits (#1489)
     add 18bd297  Lucene-9336: Changes.txt and migrate.md addition for RegExp enhancements (#1515)
     add 010168c  LUCENE-9321, LUCENE-9278: Refactor renderJavadoc to allow relative links with multiple Gradle tasks (#1488)
     add fe21359  SOLR-14407: Handle shards.purpose in the postlogs tool
     add f1db56a  SOLR-14478: Allow the diff Stream Evaluator to operate on the rows of a matrix
     add 08360a2  SOLR-14407, SOLR-14478: Update CHANGES.txt
     add 5eea975  RefGuide typo
     add 54dca80  SOLR-14471: Fix last-place replica after shards.preference rules  (#1507)
     add 4e56407  SOLR-14471: Add CHANGES entry
     add 819e668  Lucene 9370: Remove any leniency around use of backslashes in expressions as per the Java Pattern policy. (#1516)
     add 98ef96c  LUCENE-9288: poll_mirrors.py release script can handle HTTPS mirrors (#1520)
     add 34e5e6c  SOLR-14485: Fix or suppress 11 resource leak warnings in apache/solr/cloud
     add 803aad9  SOLR-8394: /admin/luke didn't compute indexHeapUsageBytes (#1497)
     add cab4e7d  added releases for Lucene/Solr 7.7.3
     add 88aff5d  LUCENE-9232: Fix or suppress 13 resource leak precommit warnings in lucene/replicator
     add c2b59f1  LUCENE-9321: Use @CompileClasspath annotation instead of @ClassPath, as only signatures are relevant for Javadocs
     add eebe40a  LUCENE-9372: gradlew does not run on cygwin (Peter Barna via Dawid Weiss)
     add 06df50e  LUCENE-9321: Port markdown task to Gradle (#1477)
     add b7c60e5  LUCENE-9321: Lazy evaluate project properties in the render-javadocs / changes2html task, also make URL (as its passed on command line) an input of task
     add 10d1ecb  LUCENE-9333: lazily evaluate for 'docroot' property
     add 51c15b8  LUCENE-9333: Use DirectoryProperty instead of directly having Provider; move script file to input field.
     new 3cb8ebb  SOLR-14473: Improve Overseer Javadoc (#1510)
     new 648e068  SOLR-14472: Autoscale "cores": use metrics to count Also counts all cores (lazy, transient), although currently impossible to use these in SolrCloud.
     new e0e50b9  SOLR-14472: missed CHANGES.txt
     new dd08642  SOLR-14476: Add percentiles and standard deviation aggregations to stats, facet and timeseries Streaming Expressions
     new d68673d  SOLR-14476: Fix precommit
     new 8f2fc5e  Lucene-9371: Allow external access to RegExp's parsed structure (#1521)
     new 51c8e07  SOLR-14486: Autoscaling simulation framework should stop using /clusterstate.json.
     new cd6b9b9  SOLR-14484: avoid putting null into MDC Co-authored-by: Andras Salamon
     new d00f79c  LUCENE-9374: Add checkBrokenLinks gradle task (#1522)
     new 46b617e  SOLR-14492: Fix ArrayIndexOutOfBoundsException in json.facet 'terms' when FacetFieldProcessorByHashDV is used with aggregations over multivalued numeric fields
     new 63d9cff  SOLR-14482: Fix or suppress warnings in solr/search/facet
     new 17592d2  LUCENE-9376: Fix or suppress 20 resource leak precommit warnings in lucene/search
     new aec740d  SOLR-14504: ZkController LiveNodesListener has NullPointerException in startup race.
     new dfa3a96  SOLR-13289: Use the final collector's scoreMode (#1517)
     new ea36cb5  SOLR-13289: Rename minExactHits to minExactCount (#1511)
     new 631f4a9  SOLR-13289: Add Refguide changes (#1501)
     new 57456a9  SOLR-14461: Replace commons-fileupload with Jetty (#1490)
     new a00d7eb  SOLR-13325: Add a collection selector to ComputePlanAction (#1512)
     new 1ef077f  LUCENE-9330: Make SortFields responsible for index sorting and serialization (#1440)
     new 8e475cc  SOLR-14443: Make SolrLogPostTool resilient to odd requests (#1525)
     new cf98a61  SOLR-14495: Fix or suppress warnings in solr/search/function
     new 3731d71  SOLR-14498: Upgrade to Caffeine 2.8.4, which fixes the cache poisoning issue.
     new aa78758  SOLR-14280: SolrConfig error handling improvements
     new b93a1cd  DOAP changes for release 8.5.2
     new 09fa2a1  SOLR-14474: Fix remaining auxilliary class warnings in Solr
     new 3fe52dd  LUCENE-9380: Fix auxiliary class warnings in Lucene
     new 3e00bf9  Add bugfix version 8.5.2
     new 607cc54  Add back-compat indices for 8.5.2
     new abd1f6a  SOLR-14498: BlockCache gets stuck not accepting new stores. Fix gradle :solr:core:validateJarChecksums
     new cebb441  SOLR-14498: BlockCache gets stuck not accepting new stores fixing checksums
     new 8fc28ef  SOLR-14237: Fix an error on admin UI due to improper variable handling
     new de15321  SOLR-14498: BlockCache gets stuck not accepting new stores. Fix gradle check
     new f833cb5  SOLR-11934: REVERT addition of collection to log message This reverts commit e4dc9e94
     new d7762e9  SOLR-14511: Documented node.sysprop shard preference (#1536)
     new 6e2cdcc  SOLR-14419: adding {param:ref} to Query DSL
     new 2b7d278  ref_guide - metrics reporting - small typo (#1544)
     new daf7160  LUCENE-9359: Always call checkFooter in SegmentInfos#readCommit. (#1483)
     new 8cfa6a0  Revert "LUCENE-9359: Always call checkFooter in SegmentInfos#readCommit. (#1483)"
     new 2a224cb  LUCENE-9359: Always call checkFooter in SegmentInfos#readCommit. (#1483)
     new 793c1a1c LUCENE-9359: Address test failures when the codec version gets modified.
     new 0f545d7  SOLR-14494: Refactor BlockJoin to not use Filter (#1523)
     new 545dcc1  SOLR-14519:Fix or suppress warnings in solr/cloud/autoscaling
     new 270bdc1  SOLR-14491: Intercepting internode requests in KerberosPlugin when HTTP/2 client is used
     new b20ceb2  LUCENE-9301: include build time and user name only in non-snapshot builds so that jars are not recompiled on each build in development.
     new cacdc86  SOLR-14517 Obey "mm" local param on edismax queries with operators (#1540)
     new 4e7c17e  Revert "Revert "LUCENE-8962""
     new 107fc17  Fix case where mergeOnCommit would attempt to delete files twice in the presence of deletions

The 47 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
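
To review the full set of commits listed above locally, the old and new branch heads from the subject line can be given to git log as a range (a sketch; it assumes the branch has been fetched as shown earlier, so both commit IDs are available locally):

    git log --oneline 3205782..107fc17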


Summary of changes:
 .asf.yaml                                          |    8 +-
 .gitattributes                                     |    3 +
 .github/workflows/gradle-precommit.yml             |    2 +
 .github/workflows/gradle-wrapper-validation.yml    |   11 -
 build.gradle                                       |   46 +-
 .../apache/lucene/gradle/WrapperDownloader.java    |  129 +
 dev-tools/doap/lucene.rdf                          |   28 +
 dev-tools/doap/solr.rdf                            |   28 +
 dev-tools/scripts/README.md                        |   12 -
 dev-tools/scripts/addBackcompatIndexes.py          |    5 -
 dev-tools/scripts/buildAndPushRelease.py           |    4 +-
 dev-tools/scripts/createPatch.py                   |  143 -
 dev-tools/scripts/create_line_file_docs.py         |  247 +
 dev-tools/scripts/poll-mirrors.py                  |   20 +-
 dev-tools/scripts/prep-solr-ref-guide-rc.sh        |   89 -
 dev-tools/scripts/publish-solr-ref-guide.sh        |   58 -
 dev-tools/scripts/releaseWizard.py                 |  156 +-
 dev-tools/scripts/releaseWizard.yaml               |  374 +-
 dev-tools/scripts/smokeTestRelease.py              |    6 +-
 dev-tools/scripts/svnBranchToGit.py                |  797 ---
 gradle/defaults-idea.gradle                        |   29 -
 gradle/defaults-javadoc.gradle                     |   77 -
 gradle/documentation/changes-to-html.gradle        |   94 +
 gradle/documentation/documentation.gradle          |   78 +
 gradle/documentation/markdown.gradle               |  204 +
 gradle/generation/snowball.gradle                  |    2 +-
 gradle/help.gradle                                 |    3 +-
 gradle/ide/intellij-idea.gradle                    |   53 +
 gradle/jar-manifest.gradle                         |   89 +
 gradle/render-javadoc.gradle                       |  302 ++
 gradle/testing/randomization.gradle                |    9 -
 gradle/validation/check-broken-links.gradle        |   71 +
 gradle/validation/forbidden-apis/defaults.all.txt  |    2 +-
 gradle/validation/git-status.gradle                |   39 +-
 gradle/validation/jar-checks.gradle                |    2 +-
 gradle/validation/missing-docs-check.gradle        |    4 +-
 gradle/validation/precommit.gradle                 |    8 +-
 gradle/validation/rat-sources.gradle               |    1 +
 gradle/validation/validate-log-calls.gradle        |  238 +
 gradle/wrapper/gradle-wrapper.jar                  |  Bin 55616 -> 58702 bytes
 gradle/wrapper/gradle-wrapper.jar.sha256           |    1 +
 gradle/wrapper/gradle-wrapper.jar.version          |    1 +
 gradlew                                            |   15 +-
 gradlew.bat                                        |    9 +-
 help/ant.txt                                       |    2 +-
 help/validateLogCalls.txt                          |   74 +
 lucene/BUILD.md                                    |   92 +
 lucene/BUILD.txt                                   |   90 -
 lucene/CHANGES.txt                                 |  200 +-
 lucene/JRE_VERSION_MIGRATION.md                    |   39 +
 lucene/JRE_VERSION_MIGRATION.txt                   |   39 -
 lucene/MIGRATE.md                                  |  289 ++
 lucene/MIGRATE.txt                                 |  251 -
 lucene/README.md                                   |   23 +
 lucene/README.txt                                  |   23 -
 lucene/SYSTEM_REQUIREMENTS.md                      |   18 +
 lucene/SYSTEM_REQUIREMENTS.txt                     |   18 -
 lucene/analysis/common/build.gradle                |    2 +
 .../ar/ArabicNormalizationFilterFactory.java       |    5 +
 .../analysis/ar/ArabicStemFilterFactory.java       |    5 +
 .../analysis/bg/BulgarianStemFilterFactory.java    |    5 +
 .../bn/BengaliNormalizationFilterFactory.java      |    5 +
 .../analysis/bn/BengaliStemFilterFactory.java      |    5 +
 .../boost/DelimitedBoostTokenFilterFactory.java    |    5 +
 .../analysis/br/BrazilianStemFilterFactory.java    |    5 +
 .../charfilter/HTMLStripCharFilterFactory.java     |    5 +
 .../charfilter/MappingCharFilterFactory.java       |    5 +
 .../analysis/cjk/CJKBigramFilterFactory.java       |    5 +
 .../lucene/analysis/cjk/CJKWidthFilterFactory.java |    5 +
 .../ckb/SoraniNormalizationFilterFactory.java      |    5 +
 .../analysis/ckb/SoraniStemFilterFactory.java      |    5 +
 .../commongrams/CommonGramsFilterFactory.java      |    5 +
 .../commongrams/CommonGramsQueryFilterFactory.java |    5 +
 .../DictionaryCompoundWordTokenFilterFactory.java  |    5 +
 .../HyphenationCompoundWordTokenFilterFactory.java |    5 +
 .../analysis/core/DecimalDigitFilterFactory.java   |    5 +
 .../analysis/core/FlattenGraphFilterFactory.java   |    5 +
 .../analysis/core/KeywordTokenizerFactory.java     |    5 +
 .../analysis/core/LetterTokenizerFactory.java      |    5 +
 .../analysis/core/LowerCaseFilterFactory.java      |    5 +
 .../lucene/analysis/core/StopFilterFactory.java    |    5 +
 .../analysis/core/TypeTokenFilterFactory.java      |    5 +
 .../analysis/core/UpperCaseFilterFactory.java      |    5 +
 .../analysis/core/WhitespaceTokenizerFactory.java  |    5 +
 .../lucene/analysis/cz/CzechStemFilterFactory.java |    5 +
 .../analysis/de/GermanLightStemFilterFactory.java  |    5 +
 .../de/GermanMinimalStemFilterFactory.java         |    5 +
 .../de/GermanNormalizationFilterFactory.java       |    5 +
 .../analysis/de/GermanStemFilterFactory.java       |    5 +
 .../analysis/el/GreekLowerCaseFilterFactory.java   |    5 +
 .../lucene/analysis/el/GreekStemFilterFactory.java |    5 +
 .../en/EnglishMinimalStemFilterFactory.java        |    5 +
 .../en/EnglishPossessiveFilterFactory.java         |    5 +
 .../lucene/analysis/en/KStemFilterFactory.java     |    5 +
 .../analysis/en/PorterStemFilterFactory.java       |    5 +
 .../analysis/es/SpanishLightStemFilterFactory.java |    5 +
 .../es/SpanishMinimalStemFilterFactory.java        |    5 +
 .../analysis/fa/PersianCharFilterFactory.java      |    5 +
 .../fa/PersianNormalizationFilterFactory.java      |    5 +
 .../analysis/fi/FinnishLightStemFilterFactory.java |    5 +
 .../analysis/fr/FrenchLightStemFilterFactory.java  |    5 +
 .../fr/FrenchMinimalStemFilterFactory.java         |    5 +
 .../analysis/ga/IrishLowerCaseFilterFactory.java   |    5 +
 .../gl/GalicianMinimalStemFilterFactory.java       |    5 +
 .../analysis/gl/GalicianStemFilterFactory.java     |    5 +
 .../hi/HindiNormalizationFilterFactory.java        |    5 +
 .../lucene/analysis/hi/HindiStemFilterFactory.java |    5 +
 .../hu/HungarianLightStemFilterFactory.java        |    5 +
 .../hunspell/HunspellStemFilterFactory.java        |    5 +
 .../analysis/id/IndonesianStemFilterFactory.java   |    5 +
 .../in/IndicNormalizationFilterFactory.java        |    5 +
 .../analysis/it/ItalianLightStemFilterFactory.java |    5 +
 .../analysis/lv/LatvianStemFilterFactory.java      |    5 +
 .../analysis/minhash/MinHashFilterFactory.java     |    5 +
 .../miscellaneous/ASCIIFoldingFilterFactory.java   |    5 +
 .../miscellaneous/CapitalizationFilterFactory.java |    5 +
 .../miscellaneous/CodepointCountFilterFactory.java |    5 +
 .../ConcatenateGraphFilterFactory.java             |    5 +
 .../ConditionalTokenFilterFactory.java             |    5 +
 .../miscellaneous/DateRecognizerFilterFactory.java |    5 +
 .../DelimitedTermFrequencyTokenFilterFactory.java  |    5 +
 .../miscellaneous/FingerprintFilterFactory.java    |    5 +
 .../FixBrokenOffsetsFilterFactory.java             |    5 +
 .../HyphenatedWordsFilterFactory.java              |    7 +-
 .../miscellaneous/KeepWordFilterFactory.java       |    5 +
 .../miscellaneous/KeywordMarkerFilterFactory.java  |    5 +
 .../miscellaneous/KeywordRepeatFilterFactory.java  |    5 +
 .../miscellaneous/LengthFilterFactory.java         |    5 +
 .../LimitTokenCountFilterFactory.java              |    5 +
 .../LimitTokenOffsetFilterFactory.java             |    5 +
 .../LimitTokenPositionFilterFactory.java           |    5 +
 .../miscellaneous/ProtectedTermFilterFactory.java  |    5 +
 .../RemoveDuplicatesTokenFilterFactory.java        |    5 +
 .../ScandinavianFoldingFilterFactory.java          |    5 +
 .../ScandinavianNormalizationFilterFactory.java    |    5 +
 .../StemmerOverrideFilterFactory.java              |    5 +
 .../analysis/miscellaneous/TrimFilterFactory.java  |    5 +
 .../miscellaneous/TruncateTokenFilterFactory.java  |    5 +
 .../miscellaneous/TypeAsSynonymFilterFactory.java  |    5 +
 .../miscellaneous/WordDelimiterFilterFactory.java  |    5 +
 .../WordDelimiterGraphFilterFactory.java           |    5 +
 .../analysis/ngram/EdgeNGramFilterFactory.java     |    5 +
 .../analysis/ngram/EdgeNGramTokenizerFactory.java  |    5 +
 .../lucene/analysis/ngram/NGramFilterFactory.java  |   10 +-
 .../analysis/ngram/NGramTokenizerFactory.java      |    5 +
 .../no/NorwegianLightStemFilterFactory.java        |    5 +
 .../no/NorwegianMinimalStemFilterFactory.java      |    5 +
 .../path/PathHierarchyTokenizerFactory.java        |    7 +-
 .../pattern/PatternCaptureGroupFilterFactory.java  |    6 +
 .../pattern/PatternReplaceCharFilterFactory.java   |    5 +
 .../pattern/PatternReplaceFilterFactory.java       |    5 +
 .../analysis/pattern/PatternTokenizerFactory.java  |    5 +
 .../SimplePatternSplitTokenizerFactory.java        |    5 +
 .../pattern/SimplePatternTokenizerFactory.java     |    5 +
 .../DelimitedPayloadTokenFilterFactory.java        |    5 +
 .../payloads/NumericPayloadTokenFilterFactory.java |    5 +
 .../TokenOffsetPayloadTokenFilterFactory.java      |    5 +
 .../payloads/TypeAsPayloadTokenFilterFactory.java  |    5 +
 .../pt/PortugueseLightStemFilterFactory.java       |    5 +
 .../pt/PortugueseMinimalStemFilterFactory.java     |    5 +
 .../analysis/pt/PortugueseStemFilterFactory.java   |    5 +
 .../reverse/ReverseStringFilterFactory.java        |    5 +
 .../analysis/ru/RussianLightStemFilterFactory.java |    5 +
 .../shingle/FixedShingleFilterFactory.java         |    5 +
 .../analysis/shingle/ShingleFilterFactory.java     |    5 +
 .../snowball/SnowballPorterFilterFactory.java      |    5 +
 .../sr/SerbianNormalizationFilterFactory.java      |    7 +-
 .../analysis/standard/ClassicFilterFactory.java    |    5 +
 .../analysis/standard/ClassicTokenizerFactory.java |    5 +
 .../standard/StandardTokenizerFactory.java         |    5 +
 .../standard/UAX29URLEmailTokenizerFactory.java    |    5 +
 .../analysis/sv/SwedishLightStemFilterFactory.java |    5 +
 .../analysis/synonym/SynonymFilterFactory.java     |    5 +
 .../synonym/SynonymGraphFilterFactory.java         |    5 +
 .../lucene/analysis/th/ThaiTokenizerFactory.java   |    5 +
 .../analysis/tr/ApostropheFilterFactory.java       |    5 +
 .../analysis/tr/TurkishLowerCaseFilterFactory.java |    5 +
 .../analysis/util/AbstractAnalysisFactory.java     |   50 +-
 .../lucene/analysis/util/AnalysisSPILoader.java    |   37 +-
 .../lucene/analysis/util/CharFilterFactory.java    |    7 +-
 .../lucene/analysis/util/ElisionFilterFactory.java |    5 +
 .../lucene/analysis/util/TokenFilterFactory.java   |    7 +-
 .../lucene/analysis/util/TokenizerFactory.java     |    7 +-
 .../wikipedia/WikipediaTokenizerFactory.java       |    5 +
 .../lucene/analysis/snowball/french_stop.txt       |   20 +-
 .../apache/lucene/analysis/core/TestFactories.java |    4 +-
 .../lucene/analysis/core/TestRandomChains.java     |    3 +
 .../analysis/util/TestAbstractAnalysisFactory.java |    6 +-
 lucene/analysis/icu/build.gradle                   |    2 +
 .../analysis/icu/ICUFoldingFilterFactory.java      |    5 +
 .../icu/ICUNormalizer2CharFilterFactory.java       |    5 +
 .../analysis/icu/ICUNormalizer2FilterFactory.java  |    5 +
 .../analysis/icu/ICUTransformFilterFactory.java    |    5 +
 .../icu/segmentation/ICUTokenizerFactory.java      |    5 +
 lucene/analysis/kuromoji/build.gradle              |    2 +
 .../analysis/ja/JapaneseBaseFormFilterFactory.java |    5 +
 .../ja/JapaneseIterationMarkCharFilterFactory.java |    5 +
 .../ja/JapaneseKatakanaStemFilterFactory.java      |    5 +
 .../analysis/ja/JapaneseNumberFilterFactory.java   |    5 +
 .../ja/JapanesePartOfSpeechStopFilterFactory.java  |    5 +
 .../ja/JapaneseReadingFormFilterFactory.java       |    5 +
 .../analysis/ja/JapaneseTokenizerFactory.java      |    5 +
 .../lucene/analysis/ja/util/DictionaryBuilder.java |    7 +-
 .../apache/lucene/analysis/ja/TestFactories.java   |    4 +-
 lucene/analysis/morfologik/build.gradle            |    2 +
 .../morfologik/MorfologikFilterFactory.java        |    5 +
 lucene/analysis/nori/build.gradle                  |    2 +
 .../analysis/ko/KoreanNumberFilterFactory.java     |    5 +
 .../ko/KoreanPartOfSpeechStopFilterFactory.java    |    5 +
 .../ko/KoreanReadingFormFilterFactory.java         |    5 +
 .../lucene/analysis/ko/KoreanTokenizerFactory.java |    5 +
 lucene/analysis/opennlp/build.gradle               |    2 +
 .../opennlp/OpenNLPChunkerFilterFactory.java       |    5 +
 .../opennlp/OpenNLPLemmatizerFilterFactory.java    |    5 +
 .../analysis/opennlp/OpenNLPPOSFilterFactory.java  |    5 +
 .../analysis/opennlp/OpenNLPTokenizerFactory.java  |    5 +
 lucene/analysis/phonetic/build.gradle              |    2 +
 .../phonetic/BeiderMorseFilterFactory.java         |    5 +
 .../DaitchMokotoffSoundexFilterFactory.java        |    5 +
 .../phonetic/DoubleMetaphoneFilterFactory.java     |    5 +
 .../analysis/phonetic/PhoneticFilterFactory.java   |    5 +
 lucene/analysis/smartcn/build.gradle               |    2 +
 .../cn/smart/HMMChineseTokenizerFactory.java       |    5 +
 lucene/analysis/stempel/build.gradle               |    2 +
 .../stempel/StempelPolishStemFilterFactory.java    |    5 +
 lucene/backward-codecs/build.gradle                |    2 +
 .../codecs/lucene70/Lucene70SegmentInfoFormat.java |  281 +
 .../lucene/codecs/lucene70/package-info.java       |   22 +
 .../lucene/codecs/lucene84/Lucene84Codec.java      |  178 +
 .../lucene/codecs/lucene84/package-info.java       |   22 +
 .../services/org.apache.lucene.codecs.Codec        |    1 +
 .../lucene70/Lucene70RWSegmentInfoFormat.java      |  204 +
 .../lucene70/TestLucene70SegmentInfoFormat.java    |   42 +
 .../lucene/index/TestBackwardsCompatibility.java   |   39 +-
 .../org/apache/lucene/index/index.8.5.0-cfs.zip    |  Bin 0 -> 15909 bytes
 .../org/apache/lucene/index/index.8.5.0-nocfs.zip  |  Bin 0 -> 15901 bytes
 .../org/apache/lucene/index/index.8.5.1-cfs.zip    |  Bin 0 -> 15880 bytes
 .../org/apache/lucene/index/index.8.5.1-nocfs.zip  |  Bin 0 -> 15872 bytes
 .../org/apache/lucene/index/index.8.5.2-cfs.zip    |  Bin 0 -> 15897 bytes
 .../org/apache/lucene/index/index.8.5.2-nocfs.zip  |  Bin 0 -> 15902 bytes
 .../test/org/apache/lucene/index/sorted.8.5.0.zip  |  Bin 0 -> 166476 bytes
 .../test/org/apache/lucene/index/sorted.8.5.1.zip  |  Bin 0 -> 394412 bytes
 .../test/org/apache/lucene/index/sorted.8.5.2.zip  |  Bin 0 -> 80768 bytes
 lucene/benchmark/build.gradle                      |    2 +
 .../benchmark/byTask/tasks/CreateIndexTask.java    |    4 +-
 lucene/build.gradle                                |    2 +
 lucene/build.xml                                   |   10 +-
 lucene/classification/build.gradle                 |    2 +
 lucene/codecs/build.gradle                         |    2 +
 .../simpletext/SimpleTextCompoundFormat.java       |   31 +-
 .../simpletext/SimpleTextSegmentInfoFormat.java    |  306 +-
 .../lucene/codecs/uniformsplit/FSTDictionary.java  |   36 +-
 .../lucene/codecs/uniformsplit/FieldMetadata.java  |   23 +-
 .../codecs/uniformsplit/IndexDictionary.java       |    3 +-
 .../uniformsplit/UniformSplitPostingsFormat.java   |   24 +-
 .../codecs/uniformsplit/UniformSplitTerms.java     |    9 -
 .../uniformsplit/UniformSplitTermsReader.java      |   37 +-
 .../sharedterms/STUniformSplitPostingsFormat.java  |   21 +-
 .../sharedterms/STUniformSplitTerms.java           |    3 +-
 .../sharedterms/STUniformSplitTermsReader.java     |   24 +-
 .../sharedterms/UnionFieldMetadataBuilder.java     |    8 +-
 .../simpletext/TestSimpleTextCompoundFormat.java   |    5 +
 .../codecs/uniformsplit/TestFSTDictionary.java     |    3 +-
 .../TestUniformSplitPostingFormat.java             |   26 +-
 .../TestSTUniformSplitPostingFormat.java           |   12 +-
 lucene/common-build.xml                            |   11 +-
 lucene/core/build.gradle                           |    3 +-
 .../src/java/org/apache/lucene/codecs/Codec.java   |    2 +-
 .../java/org/apache/lucene/codecs/CodecUtil.java   |   21 +-
 .../apache/lucene/codecs/CompoundDirectory.java    |   83 +
 .../org/apache/lucene/codecs/CompoundFormat.java   |    4 +-
 .../apache/lucene/codecs/DocValuesConsumer.java    |    2 +-
 .../lucene/codecs/MultiLevelSkipListReader.java    |    8 +-
 .../codecs/blocktree/BlockTreeTermsReader.java     |   27 +-
 .../codecs/lucene50/Lucene50CompoundFormat.java    |    3 +-
 .../codecs/lucene50/Lucene50CompoundReader.java    |   49 +-
 .../codecs/lucene70/Lucene70SegmentInfoFormat.java |  439 --
 .../lucene/codecs/lucene70/package-info.java       |   22 -
 .../lucene/codecs/lucene84/Lucene84Codec.java      |  178 -
 .../lucene/codecs/lucene84/package-info.java       |  396 +-
 .../lucene/codecs/lucene86/Lucene86Codec.java      |  178 +
 .../codecs/lucene86/Lucene86SegmentInfoFormat.java |  217 +
 .../lucene/codecs/lucene86/package-info.java       |  416 ++
 .../codecs/perfield/PerFieldDocValuesFormat.java   |    3 +
 .../document/LatLonShapeBoundingBoxQuery.java      |  477 +-
 .../apache/lucene/document/LatLonShapeQuery.java   |   99 +-
 .../org/apache/lucene/document/ShapeField.java     |   34 +
 .../org/apache/lucene/document/ShapeQuery.java     |   15 +-
 .../org/apache/lucene/document/XYShapeQuery.java   |  109 +-
 .../src/java/org/apache/lucene/geo/Circle2D.java   |  169 +-
 .../java/org/apache/lucene/geo/Component2D.java    |   89 +-
 .../java/org/apache/lucene/geo/ComponentTree.java  |  101 +-
 .../src/java/org/apache/lucene/geo/Line2D.java     |   84 +-
 .../src/java/org/apache/lucene/geo/Point2D.java    |   45 +-
 .../src/java/org/apache/lucene/geo/Polygon2D.java  |  180 +-
 .../java/org/apache/lucene/geo/Rectangle2D.java    |  127 +-
 .../apache/lucene/index/BinaryDocValuesWriter.java |   29 +-
 .../org/apache/lucene/index/BufferedUpdates.java   |   35 +-
 .../apache/lucene/index/BufferedUpdatesStream.java |    5 +-
 .../lucene/index/ConcurrentMergeScheduler.java     |  103 +-
 .../apache/lucene/index/DefaultIndexingChain.java  |  185 +-
 .../java/org/apache/lucene/index/DocValues.java    |    4 +-
 .../apache/lucene/index/DocValuesLeafReader.java   |   89 +
 .../org/apache/lucene/index/DocValuesWriter.java   |    8 +-
 .../org/apache/lucene/index/DocumentsWriter.java   |  242 +-
 .../lucene/index/DocumentsWriterDeleteQueue.java   |   87 +-
 .../lucene/index/DocumentsWriterFlushControl.java  |  467 +-
 .../lucene/index/DocumentsWriterPerThread.java     |  281 +-
 .../lucene/index/DocumentsWriterPerThreadPool.java |  290 +-
 .../lucene/index/DocumentsWriterStallControl.java  |    5 +-
 .../lucene/index/FlushByRamOrCountsPolicy.java     |   26 +-
 .../java/org/apache/lucene/index/FlushPolicy.java  |   38 +-
 .../apache/lucene/index/FrozenBufferedUpdates.java |  297 +-
 .../org/apache/lucene/index/IndexFileDeleter.java  |    4 +-
 .../java/org/apache/lucene/index/IndexSorter.java  |  448 ++
 .../java/org/apache/lucene/index/IndexWriter.java  |  791 ++-
 .../org/apache/lucene/index/IndexWriterConfig.java |   31 +-
 .../apache/lucene/index/LiveIndexWriterConfig.java |   16 -
 .../java/org/apache/lucene/index/MergePolicy.java  |   14 +-
 .../org/apache/lucene/index/MergeScheduler.java    |   42 +-
 .../org/apache/lucene/index/MultiDocValues.java    |    2 +-
 .../java/org/apache/lucene/index/MultiSorter.java  |  144 +-
 .../org/apache/lucene/index/NoMergeScheduler.java  |    2 +-
 .../lucene/index/NumericDocValuesWriter.java       |   33 +-
 .../org/apache/lucene/index/ReadersAndUpdates.java |   44 +-
 .../org/apache/lucene/index/SegmentCommitInfo.java |   40 +-
 .../apache/lucene/index/SegmentCoreReaders.java    |    3 +-
 .../java/org/apache/lucene/index/SegmentInfos.java |  240 +-
 .../org/apache/lucene/index/SegmentMerger.java     |    2 +-
 .../org/apache/lucene/index/SegmentReader.java     |    8 +
 .../apache/lucene/index/SerialMergeScheduler.java  |    6 +-
 .../org/apache/lucene/index/SortFieldProvider.java |  118 +
 .../apache/lucene/index/SortedDocValuesWriter.java |   50 +-
 .../lucene/index/SortedNumericDocValuesWriter.java |   33 +-
 .../lucene/index/SortedSetDocValuesWriter.java     |   45 +-
 .../src/java/org/apache/lucene/index/Sorter.java   |  238 +-
 .../search/ControlledRealTimeReopenThread.java     |   18 +-
 .../lucene/search/DocValuesRewriteMethod.java      |    2 +-
 .../apache/lucene/search/DoubleValuesSource.java   |   12 +-
 .../lucene/search/FuzzyAutomatonBuilder.java       |   88 +
 .../java/org/apache/lucene/search/FuzzyQuery.java  |   58 +-
 .../org/apache/lucene/search/FuzzyTermsEnum.java   |  151 +-
 .../org/apache/lucene/search/IndexSearcher.java    |   76 +-
 .../org/apache/lucene/search/MultiTermQuery.java   |    6 +-
 .../org/apache/lucene/search/QueryVisitor.java     |    5 +-
 .../lucene/search/QueueSizeBasedExecutor.java      |   60 +
 .../org/apache/lucene/search/SliceExecutor.java    |   80 +
 .../java/org/apache/lucene/search/SortField.java   |  135 +
 .../lucene/search/SortedNumericSortField.java      |  106 +
 .../apache/lucene/search/SortedSetSortField.java   |   69 +-
 .../org/apache/lucene/search/TermInSetQuery.java   |   22 +-
 .../apache/lucene/search/TopFieldCollector.java    |    4 +-
 .../apache/lucene/search/TopScoreDocCollector.java |    4 +-
 .../search/UsageTrackingQueryCachingPolicy.java    |    5 +
 .../apache/lucene/store/BufferedIndexInput.java    |  226 +-
 .../org/apache/lucene/store/NIOFSDirectory.java    |   34 +-
 .../src/java/org/apache/lucene/util/BitUtil.java   |  128 +-
 .../org/apache/lucene/util/ClassLoaderUtils.java   |   48 +
 .../org/apache/lucene/util/NamedSPILoader.java     |   29 +-
 .../org/apache/lucene/util/SPIClassIterator.java   |  168 -
 .../src/java/org/apache/lucene/util/Version.java   |   21 +
 .../lucene/util/automaton/CompiledAutomaton.java   |    4 +-
 .../org/apache/lucene/util/automaton/RegExp.java   |  329 +-
 .../java/org/apache/lucene/util/bkd/BKDWriter.java |  407 +-
 .../org/apache/lucene/util/fst/BitTableUtil.java   |  172 +
 .../src/java/org/apache/lucene/util/fst/FST.java   |  353 +-
 .../java/org/apache/lucene/util/fst/FSTEnum.java   |   17 +-
 .../java/org/apache/lucene/util/fst/NodeHash.java  |    2 +-
 .../src/java/org/apache/lucene/util/fst/Util.java  |   19 +-
 .../org/apache/lucene/util/fst/package-info.java   |   18 +-
 .../services/org.apache.lucene.codecs.Codec        |    2 +-
 .../org.apache.lucene.index.SortFieldProvider      |   20 +
 .../apache/lucene/TestMergeSchedulerExternal.java  |   20 +-
 .../org/apache/lucene/codecs/TestCodecUtil.java    |   12 +-
 ...tLucene50StoredFieldsFormatHighCompression.java |   11 +-
 .../codecs/lucene60/TestLucene60PointsFormat.java  |   21 +-
 .../lucene70/TestLucene70SegmentInfoFormat.java    |   35 -
 .../codecs/lucene80/TestLucene80NormsFormat.java   |    4 +-
 .../lucene86/TestLucene86SegmentInfoFormat.java    |   37 +
 .../perfield/TestPerFieldDocValuesFormat.java      |   40 +-
 .../lucene/document/BaseShapeEncodingTestCase.java |  119 +-
 .../apache/lucene/document/BaseShapeTestCase.java  |   95 +
 .../lucene/document/BaseXYShapeTestCase.java       |    4 +-
 .../document/TestLatLonLineShapeQueries.java       |   32 +-
 .../TestLatLonMultiPolygonShapeQueries.java        |   21 +-
 .../document/TestLatLonPointShapeQueries.java      |   14 +-
 .../document/TestLatLonPolygonShapeQueries.java    |   42 +-
 .../apache/lucene/document/TestLatLonShape.java    |   87 +-
 .../lucene/document/TestXYLineShapeQueries.java    |   30 +-
 .../lucene/document/TestXYPointShapeQueries.java   |   16 +-
 .../lucene/document/TestXYPolygonShapeQueries.java |   42 +-
 .../test/org/apache/lucene/geo/TestCircle2D.java   |   35 +-
 .../src/test/org/apache/lucene/geo/TestLine2D.java |   22 +-
 .../test/org/apache/lucene/geo/TestPoint2D.java    |   23 +-
 .../test/org/apache/lucene/geo/TestPolygon2D.java  |   38 +-
 .../org/apache/lucene/geo/TestRectangle2D.java     |   64 +-
 .../test/org/apache/lucene/geo/TestXYPoint.java    |    2 +-
 .../org/apache/lucene/index/TestAddIndexes.java    |    2 +-
 .../apache/lucene/index/TestBufferedUpdates.java   |   57 +
 .../lucene/index/TestConcurrentMergeScheduler.java |  120 +-
 .../src/test/org/apache/lucene/index/TestDoc.java  |    2 +-
 .../org/apache/lucene/index/TestFieldsReader.java  |    6 +-
 .../lucene/index/TestFlushByRamOrCountsPolicy.java |   90 +-
 .../apache/lucene/index/TestForceMergeForever.java |    4 +-
 .../apache/lucene/index/TestIndexFileDeleter.java  |    4 +-
 .../org/apache/lucene/index/TestIndexSorting.java  |    5 +-
 .../org/apache/lucene/index/TestIndexWriter.java   |  609 ++-
 .../apache/lucene/index/TestIndexWriterConfig.java |    1 -
 .../apache/lucene/index/TestIndexWriterDelete.java |   10 +-
 .../lucene/index/TestIndexWriterExceptions.java    |    6 +-
 .../lucene/index/TestIndexWriterExceptions2.java   |    6 +-
 .../lucene/index/TestIndexWriterMaxDocs.java       |    2 +-
 .../lucene/index/TestIndexWriterMerging.java       |    6 +-
 .../lucene/index/TestIndexWriterOnDiskFull.java    |    6 +-
 .../apache/lucene/index/TestIndexWriterReader.java |   15 +-
 .../index/TestIndexWriterThreadsToSegments.java    |    4 +-
 .../lucene/index/TestIndexWriterWithThreads.java   |    8 +-
 .../lucene/index/TestNRTReaderWithThreads.java     |    4 +-
 .../apache/lucene/index/TestNoMergeScheduler.java  |    2 +-
 .../lucene/index/TestNumericDocValuesUpdates.java  |  155 +
 .../index/TestOneMergeWrappingMergePolicy.java     |    2 +-
 .../apache/lucene/index/TestPendingDeletes.java    |    6 +-
 .../lucene/index/TestPendingSoftDeletes.java       |    2 +-
 .../apache/lucene/index/TestPerSegmentDeletes.java |   10 +-
 .../org/apache/lucene/index/TestPointValues.java   |    4 +-
 .../org/apache/lucene/index/TestSegmentInfos.java  |  103 +-
 .../org/apache/lucene/index/TestSegmentMerger.java |    2 +-
 .../index/TestSoftDeletesRetentionMergePolicy.java |   14 +-
 .../apache/lucene/index/TestTieredMergePolicy.java |    6 +-
 .../index/TestTragicIndexWriterDeadlock.java       |    8 +-
 .../apache/lucene/search/TermInSetQueryTest.java   |   42 +
 .../org/apache/lucene/search/TestBoolean2.java     |    8 +-
 .../search/TestControlledRealTimeReopenThread.java |    8 +-
 .../lucene/search/TestDoubleValuesSource.java      |   12 +-
 .../org/apache/lucene/search/TestFuzzyQuery.java   |   89 +-
 .../apache/lucene/search/TestIndexSearcher.java    |   94 +-
 .../apache/lucene/search/TestLRUQueryCache.java    |   17 +-
 .../org/apache/lucene/search/TestRegexpQuery.java  |   37 +-
 .../lucene/search/TestSameScoresWithThreads.java   |    1 +
 .../apache/lucene/search/TestSearcherManager.java  |    5 +-
 .../org/apache/lucene/search/TestTermQuery.java    |    7 +-
 .../apache/lucene/search/TestTopDocsCollector.java |   59 +-
 .../lucene/search/TestTopFieldCollector.java       |   58 +-
 .../TestUsageTrackingFilterCachingPolicy.java      |    9 +
 .../lucene/store/TestBufferedIndexInput.java       |  114 +-
 .../test/org/apache/lucene/util/TestBitUtil.java   |   87 -
 .../apache/lucene/util/TestClassLoaderUtils.java   |   38 +
 .../org/apache/lucene/util/TestOfflineSorter.java  |   17 +-
 .../apache/lucene/util/TestSPIClassIterator.java   |   38 -
 .../apache/lucene/util/automaton/TestRegExp.java   |  148 +
 .../test/org/apache/lucene/util/bkd/TestBKD.java   |   16 +-
 .../apache/lucene/util/fst/TestBitTableUtil.java   |  138 +
 .../lucene/util/fst/TestFSTDirectAddressing.java   |   85 +-
 lucene/default-nested-ivy-settings.xml             |    2 +-
 lucene/demo/build.gradle                           |    2 +
 .../lucene/demo/facet/DistanceFacetsExample.java   |    5 +-
 .../facet/ExpressionAggregationFacetsExample.java  |    6 +-
 lucene/expressions/build.gradle                    |    2 +
 .../lucene/expressions/ExpressionValueSource.java  |    5 +-
 .../apache/lucene/expressions/SimpleBindings.java  |   97 +-
 .../lucene/expressions/TestDemoExpressions.java    |   37 +-
 .../lucene/expressions/TestExpressionRescorer.java |    6 +-
 .../expressions/TestExpressionSortField.java       |   21 +-
 .../lucene/expressions/TestExpressionSorts.java    |   20 +-
 .../expressions/TestExpressionValidation.java      |   14 +-
 .../expressions/TestExpressionValueSource.java     |   75 +-
 lucene/facet/build.gradle                          |    1 +
 lucene/grouping/build.gradle                       |    2 +
 .../search/grouping/BlockGroupingCollector.java    |    4 +
 .../apache/lucene/search/grouping/DoubleRange.java |   59 +
 .../lucene/search/grouping/DoubleRangeFactory.java |   67 +
 .../search/grouping/DoubleRangeGroupSelector.java  |  100 +
 .../grouping/FirstPassGroupingCollector.java       |    1 +
 .../lucene/search/grouping/GroupSelector.java      |   10 +-
 .../lucene/search/grouping/GroupingSearch.java     |    8 +
 .../apache/lucene/search/grouping/LongRange.java   |   58 +
 .../lucene/search/grouping/LongRangeFactory.java   |   67 +
 .../search/grouping/LongRangeGroupSelector.java    |  101 +
 .../grouping/SecondPassGroupingCollector.java      |    1 +
 .../lucene/search/grouping/TermGroupSelector.java  |    4 +
 .../search/grouping/ValueSourceGroupSelector.java  |    6 +-
 .../lucene/search/grouping/package-info.java       |   23 +-
 .../search/grouping/AbstractGroupingTestCase.java  |   45 +
 .../search/grouping/BaseGroupSelectorTestCase.java |  365 ++
 .../lucene/search/grouping/BlockGroupingTest.java  |  225 +
 .../grouping/DoubleRangeGroupSelectorTest.java     |   59 +
 .../grouping/LongRangeGroupSelectorTest.java       |   59 +
 .../search/grouping/TermGroupSelectorTest.java     |   60 +
 .../search/grouping/TestDoubleRangeFactory.java    |   39 +
 .../search/grouping/TestLongRangeFactory.java      |   39 +
 .../grouping/ValueSourceGroupSelectorTest.java     |   51 +
 lucene/highlighter/build.gradle                    |    2 +
 .../search/uhighlight/MultiTermHighlighting.java   |    5 +-
 .../search/uhighlight/UnifiedHighlighter.java      |    2 +
 .../lucene/search/highlight/HighlighterTest.java   |   27 +-
 .../lucene/search/highlight/TokenSourcesTest.java  |    2 +
 .../highlight/custom/HighlightCustomQueryTest.java |   23 +-
 lucene/ivy-versions.properties                     |   19 +-
 lucene/join/build.gradle                           |    2 +
 .../jetty-continuation-9.4.24.v20191120.jar.sha1   |    1 -
 .../jetty-continuation-9.4.27.v20200227.jar.sha1   |    1 +
 .../licenses/jetty-http-9.4.24.v20191120.jar.sha1  |    1 -
 .../licenses/jetty-http-9.4.27.v20200227.jar.sha1  |    1 +
 lucene/licenses/jetty-io-9.4.24.v20191120.jar.sha1 |    1 -
 lucene/licenses/jetty-io-9.4.27.v20200227.jar.sha1 |    1 +
 .../jetty-server-9.4.24.v20191120.jar.sha1         |    1 -
 .../jetty-server-9.4.27.v20200227.jar.sha1         |    1 +
 .../jetty-servlet-9.4.24.v20191120.jar.sha1        |    1 -
 .../jetty-servlet-9.4.27.v20200227.jar.sha1        |    1 +
 .../licenses/jetty-util-9.4.24.v20191120.jar.sha1  |    1 -
 .../licenses/jetty-util-9.4.27.v20200227.jar.sha1  |    1 +
 lucene/licenses/log4j-api-2.11.2.jar.sha1          |    1 -
 lucene/licenses/log4j-api-2.13.2.jar.sha1          |    1 +
 lucene/licenses/log4j-core-2.11.2.jar.sha1         |    1 -
 lucene/licenses/log4j-core-2.13.2.jar.sha1         |    1 +
 .../morfologik-ukrainian-search-3.9.0.jar.sha1     |    1 -
 .../morfologik-ukrainian-search-4.9.1.jar.sha1     |    1 +
 lucene/luke/build.gradle                           |    2 +
 .../apache/lucene/luke/app/AbstractHandler.java    |    4 +-
 .../dialog/documents/AddDocumentDialogFactory.java |    2 +-
 .../dialog/menubar/ExportTermsDialogFactory.java   |    4 +-
 .../luke/models/documents/DocumentsImpl.java       |    4 +-
 .../apache/lucene/luke/models/util/IndexUtils.java |   14 +-
 .../util/twentynewsgroups/MessageFilesParser.java  |    2 +-
 .../luke/util/reflection/SubtypeCollector.java     |    2 +-
 .../luke/models/overview/OverviewImplTest.java     |    2 +-
 lucene/memory/build.gradle                         |    2 +
 lucene/misc/build.gradle                           |    2 +
 .../org/apache/lucene/index/IndexSplitter.java     |    2 +-
 .../java/org/apache/lucene/store/RAFDirectory.java |   18 +-
 .../org/apache/lucene/store/WindowsDirectory.java  |    5 +-
 lucene/monitor/build.gradle                        |    2 +
 .../org/apache/lucene/monitor/DocumentBatch.java   |   12 +-
 .../org/apache/lucene/monitor/MatchingQueries.java |    2 +-
 .../lucene/monitor/MultiMatchingQueries.java       |    2 +-
 .../lucene/monitor/TermFilteredPresearcher.java    |    7 +-
 .../apache/lucene/monitor/TestDocumentBatch.java   |   58 +
 .../apache/lucene/monitor/TestTermPresearcher.java |    2 +
 lucene/queries/build.gradle                        |    2 +
 .../function/docvalues/DocTermsIndexDocValues.java |   26 +-
 .../function/valuesource/DocFreqValueSource.java   |  175 +-
 .../function/valuesource/IDFValueSource.java       |    2 +-
 .../function/valuesource/MaxDocValueSource.java    |    2 +-
 .../function/valuesource/NumDocsValueSource.java   |    2 +-
 .../function/valuesource/QueryValueSource.java     |  121 +-
 .../function/valuesource/SortedSetFieldSource.java |    2 +-
 .../queries/function/TestSortedSetFieldSource.java |   15 +-
 .../lucene/queries/function/TestValueSources.java  |   17 +-
 lucene/queryparser/build.gradle                    |    2 +
 lucene/replicator/build.gradle                     |    2 +
 .../apache/lucene/replicator/nrt/PrimaryNode.java  |    2 +-
 .../lucene/replicator/ReplicatorTestCase.java      |    2 +
 .../lucene/replicator/nrt/TestNRTReplication.java  |   17 +-
 .../replicator/nrt/TestStressNRTReplication.java   |    6 +-
 lucene/sandbox/build.gradle                        |    2 +
 .../idversion/TestIDVersionPostingsFormat.java     |    2 +-
 .../document/TestFloatPointNearestNeighbor.java    |    2 +-
 .../test/org/apache/lucene/search/TestNearest.java |    2 +-
 .../lucene/search/TestTermAutomatonQuery.java      |    3 +-
 lucene/site/xsl/index.template.md                  |   50 +
 lucene/site/xsl/index.xsl                          |    6 +-
 lucene/spatial-extras/build.gradle                 |    2 +
 .../apache/lucene/spatial/StrategyTestCase.java    |    2 +-
 .../spatial/prefix/HeatmapFacetCounterTest.java    |    2 +-
 .../prefix/RandomSpatialOpFuzzyPrefixTreeTest.java |    2 +-
 lucene/spatial3d/build.gradle                      |    2 +
 .../spatial3d/Geo3DPointDistanceComparator.java    |    2 +-
 .../Geo3DPointOutsideDistanceComparator.java       |    2 +-
 .../org/apache/lucene/spatial3d/Geo3DUtil.java     |    6 +-
 .../apache/lucene/spatial3d/geom/PlanetModel.java  |   99 +-
 .../apache/lucene/spatial3d/TestGeo3DPoint.java    |   31 +-
 lucene/suggest/build.gradle                        |    2 +
 .../analyzing/SuggestStopFilterFactory.java        |    5 +
 .../suggest/analyzing/TestFreeTextSuggester.java   |    1 +
 .../suggest/analyzing/TestSuggestStopFilter.java   |    9 -
 .../search/suggest/document/TestSuggestField.java  |   13 +-
 lucene/test-framework/build.gradle                 |    2 +
 .../lucene/codecs/cranky/CrankyCompoundFormat.java |    3 +-
 .../UniformSplitRot13PostingsFormat.java           |    8 +-
 .../STUniformSplitRot13PostingsFormat.java         |    4 +-
 .../apache/lucene/geo/BaseGeoPointTestCase.java    |    2 +-
 .../org/apache/lucene/geo/BaseXYPointTestCase.java |    2 +-
 .../lucene/index/BaseCompoundFormatTestCase.java   |   42 +
 .../lucene/index/BaseIndexFileFormatTestCase.java  |   22 +-
 .../lucene/index/BaseLiveDocsFormatTestCase.java   |    4 +-
 .../lucene/index/BaseMergePolicyTestCase.java      |   12 +-
 .../org/apache/lucene/index/RandomIndexWriter.java |   58 +-
 .../index/SuppressingConcurrentMergeScheduler.java |    6 +-
 .../ThreadedIndexingAndSearchingTestCase.java      |   19 +-
 .../lucene/search/ShardSearchingTestBase.java      |    3 +-
 .../apache/lucene/store/MockDirectoryWrapper.java  |   18 +-
 .../java/org/apache/lucene/util/LineFileDocs.java  |  110 +-
 .../org/apache/lucene/util/LuceneTestCase.java     |   28 +-
 .../util/TestRuleSetupAndRestoreClassEnv.java      |   29 +-
 .../src/java/org/apache/lucene/util/TestUtil.java  |    4 +-
 .../org/apache/lucene/util/europarl.lines.txt.gz   |  Bin 5730708 -> 9695474 bytes
 .../org/apache/lucene/util/europarl.lines.txt.seek |   19 +
 lucene/tools/forbiddenApis/base.txt                |    2 +-
 .../tools/src/groovy/check-source-patterns.groovy  |    4 +-
 solr/CHANGES.txt                                   |  261 +-
 solr/NOTICE.txt                                    |   15 +-
 solr/README.md                                     |  217 +
 solr/README.txt                                    |  189 -
 solr/bin/solr                                      |   16 +-
 solr/bin/solr.cmd                                  |   11 +
 solr/bin/solr.in.cmd                               |   20 +-
 solr/bin/solr.in.sh                                |    8 +-
 solr/build.gradle                                  |    2 +
 solr/build.xml                                     |   12 +-
 solr/contrib/analysis-extras/README.md             |   26 +
 solr/contrib/analysis-extras/README.txt            |   23 -
 solr/contrib/analysis-extras/build.gradle          |    2 +
 ...ExtractNamedEntitiesUpdateProcessorFactory.java |    2 +-
 solr/contrib/analytics/build.gradle                |    2 +
 .../apache/solr/analytics/ExpressionFactory.java   |   78 +-
 .../apache/solr/analytics/facet/PivotFacet.java    |   77 +-
 .../apache/solr/analytics/facet/QueryFacet.java    |    8 +-
 .../apache/solr/analytics/facet/RangeFacet.java    |    8 +-
 .../function/mapping/ComparisonFunction.java       |  320 +-
 .../function/mapping/DateMathFunction.java         |  171 +-
 .../function/mapping/DateParseFunction.java        |  261 +-
 .../mapping/DecimalNumericConversionFunction.java  |  281 +-
 .../analytics/function/mapping/EqualFunction.java  |  301 +-
 .../analytics/function/mapping/ExistsFunction.java |  143 +-
 .../function/mapping/FillMissingFunction.java      | 1283 ++---
 .../analytics/function/mapping/FilterFunction.java | 1075 ++--
 .../analytics/function/mapping/IfFunction.java     | 1268 ++---
 .../analytics/function/mapping/LambdaFunction.java | 4044 +++++++--------
 .../analytics/function/mapping/RemoveFunction.java | 1187 ++---
 .../function/mapping/ReplaceFunction.java          | 1519 +++---
 .../analytics/function/reduction/MaxFunction.java  |  405 +-
 .../function/reduction/MedianFunction.java         |  237 +-
 .../analytics/function/reduction/MinFunction.java  |  405 +-
 .../function/reduction/OrdinalFunction.java        |  489 +-
 .../function/reduction/PercentileFunction.java     |  453 +-
 .../stream/AnalyticsShardRequestManager.java       |    4 +-
 .../solr/analytics/util/FacetRangeGenerator.java   |  132 +-
 .../solr/analytics/util/MedianCalculator.java      |    6 +-
 .../solr/analytics/util/OldAnalyticsParams.java    |   34 +-
 .../util/OldAnalyticsRequestConverter.java         |    4 +-
 .../solr/analytics/util/OrdinalCalculator.java     |   15 +-
 .../org/apache/solr/handler/AnalyticsHandler.java  |    6 -
 .../analytics/legacy/facetWithDottedFields.txt     |   16 +
 .../solr/collection1/conf/schema-analytics.xml     |    2 +
 .../util/OldAnalyticsRequestConverterUnitTest.java |   64 +
 solr/contrib/clustering/{README.txt => README.md}  |    0
 solr/contrib/clustering/build.gradle               |    2 +
 .../handler/clustering/ClusteringComponent.java    |   20 +-
 .../clustering/carrot2/CarrotClusteringEngine.java |   20 +-
 .../carrot2/LuceneCarrot2StemmerFactory.java       |   10 +-
 .../clustering/carrot2/SolrResourceLocator.java    |    8 +-
 solr/contrib/dataimporthandler-extras/build.gradle |    2 +
 .../handler/dataimport/MailEntityProcessor.java    |  102 +-
 .../handler/dataimport/TikaEntityProcessor.java    |   18 +-
 solr/contrib/dataimporthandler/README.md           |   17 +
 solr/contrib/dataimporthandler/README.txt          |   16 -
 solr/contrib/dataimporthandler/build.gradle        |    2 +
 .../solr/handler/dataimport/BinURLDataSource.java  |    6 +-
 .../solr/handler/dataimport/DataImportHandler.java |    2 +-
 .../solr/handler/dataimport/DataImporter.java      |    4 +-
 .../apache/solr/handler/dataimport/DocBuilder.java |   34 +-
 .../handler/dataimport/EntityProcessorWrapper.java |    4 +-
 .../solr/handler/dataimport/FileDataSource.java    |   12 +-
 .../solr/handler/dataimport/JdbcDataSource.java    |   19 +-
 .../solr/handler/dataimport/RegexTransformer.java  |    2 +-
 .../handler/dataimport/SimplePropertiesWriter.java |   10 +-
 .../apache/solr/handler/dataimport/SolrWriter.java |   10 +-
 .../handler/dataimport/SqlEntityProcessor.java     |   10 +-
 .../handler/dataimport/TemplateTransformer.java    |    4 +-
 .../solr/handler/dataimport/URLDataSource.java     |    6 +-
 .../handler/dataimport/XPathEntityProcessor.java   |   13 +-
 .../solr/handler/dataimport/XPathRecordReader.java |    2 +-
 .../handler/dataimport/ZKPropertiesWriter.java     |    4 +-
 .../dataimport/config/DIHConfiguration.java        |    8 +-
 .../AbstractSqlEntityProcessorTestCase.java        |   23 +-
 .../dataimport/TestSortedMapBackedCache.java       |    6 +-
 .../dataimport/TestSqlEntityProcessorDelta.java    |   12 +-
 solr/contrib/extraction/README.md                  |   17 +
 solr/contrib/extraction/README.txt                 |   16 -
 solr/contrib/extraction/build.gradle               |    2 +
 .../extraction/ExtractingDocumentLoader.java       |    6 +-
 .../extraction/ExtractingRequestHandler.java       |   47 +-
 .../extraction/RegexRulesPasswordProvider.java     |    4 +-
 solr/contrib/jaegertracer-configurator/README.md   |   33 +
 solr/contrib/jaegertracer-configurator/README.txt  |   32 -
 .../contrib/jaegertracer-configurator/build.gradle |    2 +
 solr/contrib/langid/README.md                      |   22 +
 solr/contrib/langid/README.txt                     |   22 -
 solr/contrib/langid/build.gradle                   |    2 +
 .../LanguageIdentifierUpdateProcessor.java         |   48 +-
 .../TikaLanguageIdentifierUpdateProcessor.java     |    5 +-
 solr/contrib/ltr/README.md                         |    2 +-
 solr/contrib/ltr/README.txt                        |    1 -
 solr/contrib/ltr/build.gradle                      |    2 +
 .../java/org/apache/solr/ltr/feature/Feature.java  |   41 +
 .../solr/ltr/feature/OriginalScoreFeature.java     |   23 +-
 .../org/apache/solr/ltr/feature/SolrFeature.java   |  201 +-
 .../solr/ltr/store/rest/ManagedModelStore.java     |    4 +-
 .../apache/solr/ltr/TestLTRReRankingPipeline.java  |    6 +-
 .../test/org/apache/solr/ltr/TestRerankBase.java   |   10 +-
 .../org/apache/solr/ltr/feature/TestFeature.java   |   48 +
 .../solr/ltr/feature/TestOriginalScoreScorer.java  |   47 -
 solr/contrib/prometheus-exporter/README.md         |   21 +
 solr/contrib/prometheus-exporter/README.txt        |   21 -
 solr/contrib/prometheus-exporter/build.gradle      |    2 +
 .../collector/SchedulerMetricsCollector.java       |    4 +-
 .../solr/prometheus/exporter/SolrExporter.java     |   10 +-
 .../solr/prometheus/scraper/SolrScraper.java       |    4 +-
 .../prometheus/scraper/SolrCloudScraperTest.java   |    4 +-
 .../scraper/SolrStandaloneScraperTest.java         |    4 +-
 solr/contrib/velocity/build.gradle                 |    2 +
 .../solr/response/VelocityResponseWriter.java      |    6 +-
 solr/core/build.gradle                             |    3 +-
 solr/core/ivy.xml                                  |    1 -
 .../solr/analysis/LowerCaseTokenizerFactory.java   |    5 +
 .../analysis/ReversedWildcardFilterFactory.java    |    4 +
 .../src/java/org/apache/solr/api/AnnotatedApi.java |   91 +-
 solr/core/src/java/org/apache/solr/api/ApiBag.java |   15 +-
 .../src/java/org/apache/solr/api/EndPoint.java     |    2 +-
 .../src/java/org/apache/solr/api/V2HttpCall.java   |   22 +-
 .../client/solrj/embedded/EmbeddedSolrServer.java  |    6 +-
 .../client/solrj/embedded/JettySolrRunner.java     |   15 +-
 .../apache/solr/cloud/ActiveReplicaWatcher.java    |   19 +-
 .../apache/solr/cloud/CloudConfigSetService.java   |    6 +-
 .../src/java/org/apache/solr/cloud/CloudUtil.java  |   14 +-
 .../solr/cloud/CurrentCoreDescriptorProvider.java  |   28 -
 .../org/apache/solr/cloud/ElectionContext.java     |  707 ---
 .../apache/solr/cloud/ExclusiveSliceProperty.java  |   10 +-
 .../java/org/apache/solr/cloud/LeaderElector.java  |    2 +-
 .../src/java/org/apache/solr/cloud/LockTree.java   |    4 +-
 .../src/java/org/apache/solr/cloud/Overseer.java   |   94 +-
 .../cloud/OverseerConfigSetMessageHandler.java     |    2 +-
 .../apache/solr/cloud/OverseerElectionContext.java |  110 +
 .../apache/solr/cloud/OverseerNodePrioritizer.java |    4 +-
 .../apache/solr/cloud/OverseerTaskProcessor.java   |   67 +-
 .../org/apache/solr/cloud/OverseerTaskQueue.java   |   13 +-
 .../solr/cloud/RecoveringCoreTermWatcher.java      |    4 +-
 .../org/apache/solr/cloud/RecoveryStrategy.java    |  101 +-
 .../solr/cloud/ShardLeaderElectionContext.java     |  493 ++
 .../solr/cloud/ShardLeaderElectionContextBase.java |  194 +
 .../java/org/apache/solr/cloud/SolrZkServer.java   |   10 +-
 .../java/org/apache/solr/cloud/SyncStrategy.java   |   61 +-
 .../core/src/java/org/apache/solr/cloud/ZkCLI.java |    5 +-
 .../java/org/apache/solr/cloud/ZkController.java   |  236 +-
 .../org/apache/solr/cloud/ZkDistributedQueue.java  |   17 +-
 .../apache/solr/cloud/ZkSolrResourceLoader.java    |   34 +-
 .../solr/cloud/api/collections/AddReplicaCmd.java  |    4 +-
 .../apache/solr/cloud/api/collections/Assign.java  |   16 +-
 .../solr/cloud/api/collections/BackupCmd.java      |    4 +-
 .../cloud/api/collections/CategoryRoutedAlias.java |    4 +-
 .../cloud/api/collections/CreateCollectionCmd.java |   59 +-
 .../solr/cloud/api/collections/CreateShardCmd.java |    2 +-
 .../cloud/api/collections/CreateSnapshotCmd.java   |   20 +-
 .../cloud/api/collections/DeleteReplicaCmd.java    |    4 +-
 .../solr/cloud/api/collections/DeleteShardCmd.java |   10 +-
 .../cloud/api/collections/DeleteSnapshotCmd.java   |    6 +-
 .../api/collections/MaintainRoutedAliasCmd.java    |    6 +-
 .../solr/cloud/api/collections/MigrateCmd.java     |   43 +-
 .../solr/cloud/api/collections/MoveReplicaCmd.java |   16 +-
 .../OverseerCollectionMessageHandler.java          |   24 +-
 .../api/collections/ReindexCollectionCmd.java      |   50 +-
 .../solr/cloud/api/collections/ReplaceNodeCmd.java |   20 +-
 .../solr/cloud/api/collections/RestoreCmd.java     |   18 +-
 .../solr/cloud/api/collections/RoutedAlias.java    |    2 +-
 .../solr/cloud/api/collections/SplitShardCmd.java  |   48 +-
 .../cloud/api/collections/TimeRoutedAlias.java     |   12 +-
 .../solr/cloud/api/collections/UtilizeNodeCmd.java |   10 +-
 .../autoscaling/AutoAddReplicasPlanAction.java     |   47 +-
 .../apache/solr/cloud/autoscaling/AutoScaling.java |    2 +
 .../solr/cloud/autoscaling/AutoScalingHandler.java |   10 +
 .../solr/cloud/autoscaling/ComputePlanAction.java  |  184 +-
 .../solr/cloud/autoscaling/ExecutePlanAction.java  |   16 +-
 .../cloud/autoscaling/HttpTriggerListener.java     |    2 +-
 .../autoscaling/InactiveMarkersPlanAction.java     |    6 +-
 .../cloud/autoscaling/InactiveShardPlanAction.java |   23 +-
 .../solr/cloud/autoscaling/IndexSizeTrigger.java   |   84 +-
 .../solr/cloud/autoscaling/MetricTrigger.java      |    1 +
 .../solr/cloud/autoscaling/NodeAddedTrigger.java   |   14 +-
 .../solr/cloud/autoscaling/NodeLostTrigger.java    |    8 +-
 .../cloud/autoscaling/OverseerTriggerThread.java   |   34 +-
 .../solr/cloud/autoscaling/ScheduledTriggers.java  |   57 +-
 .../solr/cloud/autoscaling/SearchRateTrigger.java  |   21 +-
 .../solr/cloud/autoscaling/SystemLogListener.java  |    2 +
 .../apache/solr/cloud/autoscaling/TriggerBase.java |   25 +-
 .../solr/cloud/autoscaling/TriggerEvent.java       |    4 +
 .../solr/cloud/autoscaling/TriggerEventQueue.java  |   12 +-
 .../solr/cloud/autoscaling/TriggerUtils.java       |    1 +
 .../autoscaling/sim/GenericDistributedQueue.java   |    6 +-
 .../cloud/autoscaling/sim/SimCloudManager.java     |   56 +-
 .../autoscaling/sim/SimClusterStateProvider.java   |  372 +-
 .../autoscaling/sim/SimDistribStateManager.java    |    9 +-
 .../sim/SimDistributedQueueFactory.java            |    5 +-
 .../autoscaling/sim/SimNodeStateProvider.java      |   24 +-
 .../solr/cloud/autoscaling/sim/SimScenario.java    |   61 +-
 .../solr/cloud/autoscaling/sim/SimUtils.java       |    5 +-
 .../autoscaling/sim/SnapshotCloudManager.java      |    5 +-
 .../sim/SnapshotClusterStateProvider.java          |   34 +-
 .../sim/SnapshotDistribStateManager.java           |   10 +-
 .../autoscaling/sim/SnapshotNodeStateProvider.java |    5 +-
 .../solr/cloud/overseer/ClusterStateMutator.java   |    2 +-
 .../solr/cloud/overseer/CollectionMutator.java     |    6 +-
 .../apache/solr/cloud/overseer/NodeMutator.java    |    4 +-
 .../apache/solr/cloud/overseer/ReplicaMutator.java |   38 +-
 .../apache/solr/cloud/overseer/SliceMutator.java   |   14 +-
 .../apache/solr/cloud/overseer/ZkStateWriter.java  |    4 +-
 .../apache/solr/cloud/rule/ReplicaAssigner.java    |    2 +-
 .../java/org/apache/solr/core/BlobRepository.java  |    8 +-
 .../apache/solr/core/CachingDirectoryFactory.java  |   32 +-
 .../src/java/org/apache/solr/core/CloudConfig.java |   25 +-
 .../org/apache/solr/core/ConfigSetProperties.java  |    4 +-
 .../org/apache/solr/core/ConfigSetService.java     |    8 +-
 .../java/org/apache/solr/core/CoreContainer.java   |  184 +-
 .../apache/solr/core/CorePropertiesLocator.java    |   14 +-
 .../src/java/org/apache/solr/core/CoreSorter.java  |   77 +-
 .../src/java/org/apache/solr/core/Diagnostics.java |    2 +-
 .../org/apache/solr/core/DirectoryFactory.java     |    2 +-
 .../org/apache/solr/core/HdfsDirectoryFactory.java |   23 +-
 .../src/java/org/apache/solr/core/NodeConfig.java  |   60 +-
 .../src/java/org/apache/solr/core/PluginBag.java   |   30 +-
 .../org/apache/solr/core/QuerySenderListener.java  |    2 +-
 .../java/org/apache/solr/core/RequestHandlers.java |    6 +-
 .../java/org/apache/solr/core/RequestParams.java   |    8 +-
 .../org/apache/solr/core/SchemaCodecFactory.java   |    8 +-
 .../src/java/org/apache/solr/core/SolrConfig.java  |   90 +-
 .../src/java/org/apache/solr/core/SolrCore.java    |  129 +-
 .../src/java/org/apache/solr/core/SolrCores.java   |    8 +-
 .../org/apache/solr/core/SolrDeletionPolicy.java   |   12 +-
 .../src/java/org/apache/solr/core/SolrPaths.java   |  131 +
 .../org/apache/solr/core/SolrResourceLoader.java   |  303 +-
 .../java/org/apache/solr/core/SolrXmlConfig.java   |   57 +-
 .../apache/solr/core/StandardDirectoryFactory.java |    2 +-
 .../solr/core/TransientSolrCoreCacheDefault.java   |   12 +-
 .../java/org/apache/solr/core/XmlConfigFile.java   |   56 +-
 .../src/java/org/apache/solr/core/ZkContainer.java |  105 +-
 .../solr/core/snapshots/SolrSnapshotManager.java   |    8 +-
 .../snapshots/SolrSnapshotMetaDataManager.java     |   10 +-
 .../solr/core/snapshots/SolrSnapshotsTool.java     |   15 +-
 .../apache/solr/filestore/DistribPackageStore.java |   18 +-
 .../org/apache/solr/filestore/PackageStoreAPI.java |    2 +-
 .../java/org/apache/solr/handler/BlobHandler.java  |   14 +-
 .../java/org/apache/solr/handler/CatStream.java    |    6 +-
 .../solr/handler/CdcrBufferStateManager.java       |    6 +-
 .../solr/handler/CdcrLeaderStateManager.java       |    2 +-
 .../solr/handler/CdcrProcessStateManager.java      |    6 +-
 .../org/apache/solr/handler/CdcrReplicator.java    |    8 +-
 .../apache/solr/handler/CdcrReplicatorManager.java |   35 +-
 .../solr/handler/CdcrReplicatorScheduler.java      |   10 +-
 .../apache/solr/handler/CdcrRequestHandler.java    |    4 +-
 .../solr/handler/CdcrUpdateLogSynchronizer.java    |   10 +-
 .../handler/DocumentAnalysisRequestHandler.java    |    4 +-
 .../java/org/apache/solr/handler/GraphHandler.java |    5 +-
 .../java/org/apache/solr/handler/IndexFetcher.java |   91 +-
 .../apache/solr/handler/MoreLikeThisHandler.java   |    2 +-
 .../apache/solr/handler/ReplicationHandler.java    |   49 +-
 .../java/org/apache/solr/handler/RestoreCore.java  |    2 +-
 .../org/apache/solr/handler/SchemaHandler.java     |    4 +-
 .../java/org/apache/solr/handler/SnapShooter.java  |   22 +-
 .../org/apache/solr/handler/SolrConfigHandler.java |   38 +-
 .../org/apache/solr/handler/StreamHandler.java     |   50 +-
 .../solr/handler/admin/AdminHandlersProxy.java     |    8 +-
 .../org/apache/solr/handler/admin/ColStatus.java   |    7 +-
 .../solr/handler/admin/CollectionHandlerApi.java   |    2 +-
 .../solr/handler/admin/CollectionsHandler.java     |   25 +-
 .../solr/handler/admin/ConfigSetsHandler.java      |   10 +-
 .../solr/handler/admin/CoreAdminHandler.java       |    4 +-
 .../solr/handler/admin/CoreAdminOperation.java     |    2 +-
 .../solr/handler/admin/HealthCheckHandler.java     |   80 +-
 .../solr/handler/admin/IndexSizeEstimator.java     |    4 +-
 .../apache/solr/handler/admin/LoggingHandler.java  |    7 +-
 .../solr/handler/admin/LukeRequestHandler.java     |   35 +-
 .../handler/admin/MetricsCollectorHandler.java     |    8 +-
 .../solr/handler/admin/MetricsHistoryHandler.java  |   22 +-
 .../apache/solr/handler/admin/PrepRecoveryOp.java  |   22 +-
 .../solr/handler/admin/RebalanceLeaders.java       |   16 +-
 .../solr/handler/admin/RequestSyncShardOp.java     |    8 +-
 .../handler/admin/SegmentsInfoRequestHandler.java  |   11 +-
 .../solr/handler/admin/ShowFileRequestHandler.java |   14 +-
 .../org/apache/solr/handler/admin/SplitOp.java     |   12 +-
 .../solr/handler/admin/SystemInfoHandler.java      |   56 +-
 .../solr/handler/admin/ZookeeperInfoHandler.java   |    6 +-
 .../solr/handler/admin/ZookeeperReadAPI.java       |  145 +
 .../solr/handler/admin/ZookeeperStatusHandler.java |   77 +-
 .../solr/handler/component/ExpandComponent.java    |  110 +-
 .../solr/handler/component/FacetComponent.java     |   16 +-
 .../handler/component/HttpShardHandlerFactory.java |   23 +-
 .../handler/component/IterativeMergeStrategy.java  |    4 +-
 .../handler/component/MoreLikeThisComponent.java   |   36 +-
 .../handler/component/PivotFacetProcessor.java     |    4 +-
 .../solr/handler/component/QueryComponent.java     |   32 +-
 .../handler/component/QueryElevationComponent.java |    6 +-
 .../solr/handler/component/RangeFacetRequest.java  |   13 +-
 .../handler/component/RealTimeGetComponent.java    |   13 +-
 .../solr/handler/component/ResponseBuilder.java    |    3 +-
 .../solr/handler/component/SearchHandler.java      |    4 +-
 .../handler/component/SortedDateStatsValues.java   |    4 +-
 .../component/SortedNumericStatsValues.java        |    4 +-
 .../handler/component/SpellCheckComponent.java     |   31 +-
 .../solr/handler/component/StatsComponent.java     |  110 +-
 .../apache/solr/handler/component/StatsInfo.java   |  108 +
 .../solr/handler/component/StatsValuesFactory.java | 1379 ++---
 .../solr/handler/component/SuggestComponent.java   |   45 +-
 .../org/apache/solr/handler/export/DoubleCmp.java  |   43 -
 .../org/apache/solr/handler/export/DoubleComp.java |   45 +
 .../apache/solr/handler/export/ExportWriter.java   |   28 +-
 .../org/apache/solr/handler/export/FloatCmp.java   |   44 -
 .../org/apache/solr/handler/export/FloatComp.java  |   44 +
 .../org/apache/solr/handler/export/IntComp.java    |   32 +-
 .../org/apache/solr/handler/export/LongCmp.java    |   45 -
 .../org/apache/solr/handler/export/LongComp.java   |   46 +
 .../org/apache/solr/handler/loader/JsonLoader.java |    8 +-
 .../org/apache/solr/handler/loader/XMLLoader.java  |   20 +-
 .../apache/solr/handler/sql/CalciteSolrDriver.java |   16 +-
 .../org/apache/solr/handler/sql/SolrSchema.java    |   62 +-
 .../org/apache/solr/handler/sql/SolrTable.java     |    5 +-
 .../org/apache/solr/handler/tagger/Tagger.java     |    9 +-
 .../solr/handler/tagger/TaggerRequestHandler.java  |  114 +-
 .../solr/highlight/DefaultSolrHighlighter.java     |    4 +-
 .../java/org/apache/solr/logging/LogWatcher.java   |    8 +-
 .../org/apache/solr/logging/MDCLoggingContext.java |   78 +-
 .../org/apache/solr/metrics/MetricSuppliers.java   |   10 +-
 .../java/org/apache/solr/metrics/MetricsMap.java   |    2 +-
 .../org/apache/solr/metrics/SolrMetricManager.java |   67 +-
 .../apache/solr/metrics/SolrMetricProducer.java    |    9 +-
 .../apache/solr/metrics/SolrMetricReporter.java    |    4 +-
 .../metrics/reporters/ReporterClientCache.java     |    4 +-
 .../solr/metrics/reporters/SolrJmxReporter.java    |    6 +-
 .../metrics/reporters/jmx/JmxMetricsReporter.java  |   18 +-
 .../reporters/solr/SolrClusterReporter.java        |    6 +-
 .../solr/metrics/reporters/solr/SolrReporter.java  |   80 +-
 .../metrics/reporters/solr/SolrShardReporter.java  |   18 +-
 .../apache/solr/metrics/rrd/SolrRrdBackend.java    |    2 +-
 .../solr/metrics/rrd/SolrRrdBackendFactory.java    |   24 +-
 .../packagemanager/DefaultPackageRepository.java   |    4 +-
 .../apache/solr/packagemanager/PackageManager.java |    2 +-
 .../java/org/apache/solr/pkg/PackageListeners.java |    5 +-
 .../java/org/apache/solr/pkg/PackageLoader.java    |   26 +-
 .../org/apache/solr/pkg/PackagePluginHolder.java   |   10 +-
 .../java/org/apache/solr/request/SimpleFacets.java |   15 +-
 .../apache/solr/request/SolrRequestHandler.java    |    2 +-
 .../org/apache/solr/request/SolrRequestInfo.java   |    2 +-
 .../solr/request/json/JsonQueryConverter.java      |   50 +-
 .../apache/solr/response/BinaryResponseWriter.java |    6 +-
 .../solr/response/GeoJSONResponseWriter.java       |   18 +-
 .../apache/solr/response/JSONResponseWriter.java   |   16 +-
 .../java/org/apache/solr/response/JSONWriter.java  |   43 +-
 .../solr/response/PHPSerializedResponseWriter.java |   16 +-
 .../apache/solr/response/PythonResponseWriter.java |    2 +-
 .../apache/solr/response/RubyResponseWriter.java   |    2 +-
 .../org/apache/solr/response/SchemaXmlWriter.java  |    7 +-
 .../solr/response/TabularResponseWriter.java       |    7 +
 .../apache/solr/response/TextResponseWriter.java   |   16 +-
 .../java/org/apache/solr/response/XMLWriter.java   |   26 +-
 .../apache/solr/response/XSLTResponseWriter.java   |    2 +-
 .../response/transform/ChildDocTransformer.java    |    6 +
 .../transform/ChildDocTransformerFactory.java      |   11 +-
 .../response/transform/DocIdAugmenterFactory.java  |   31 +-
 .../response/transform/ShardAugmenterFactory.java  |    2 +-
 .../transform/SubQueryAugmenterFactory.java        |   14 +-
 .../response/transform/ValueAugmenterFactory.java  |   78 +-
 .../java/org/apache/solr/rest/ManagedResource.java |   12 +-
 .../apache/solr/rest/ManagedResourceStorage.java   |   31 +-
 .../src/java/org/apache/solr/rest/RestManager.java |   21 +-
 .../analysis/BaseManagedTokenFilterFactory.java    |    5 +
 .../schema/analysis/ManagedStopFilterFactory.java  |    5 +
 .../analysis/ManagedSynonymFilterFactory.java      |    9 +-
 .../analysis/ManagedSynonymGraphFilterFactory.java |    9 +-
 .../schema/analysis/ManagedWordSetResource.java    |    7 +-
 .../org/apache/solr/schema/AbstractEnumField.java  |    5 +-
 .../solr/schema/AbstractSpatialFieldType.java      |    2 +-
 .../schema/AbstractSpatialPrefixTreeFieldType.java |    4 +-
 .../java/org/apache/solr/schema/BinaryField.java   |    2 +-
 .../src/java/org/apache/solr/schema/BoolField.java |  145 +-
 .../org/apache/solr/schema/DatePointField.java     |   49 +-
 .../src/java/org/apache/solr/schema/EnumField.java |    2 +-
 .../org/apache/solr/schema/ExternalFileField.java  |    3 +-
 .../solr/schema/ExternalFileFieldReloader.java     |    4 +-
 .../src/java/org/apache/solr/schema/FieldType.java |    2 +-
 .../apache/solr/schema/FieldTypePluginLoader.java  |   24 +-
 .../java/org/apache/solr/schema/IndexSchema.java   |   70 +-
 .../org/apache/solr/schema/IndexSchemaFactory.java |    4 +-
 .../apache/solr/schema/JsonPreAnalyzedParser.java  |   10 +-
 .../org/apache/solr/schema/ManagedIndexSchema.java |   85 +-
 .../solr/schema/ManagedIndexSchemaFactory.java     |  113 +-
 .../solr/schema/OpenExchangeRatesOrgProvider.java  |   12 +-
 .../java/org/apache/solr/schema/PointField.java    |    4 +-
 .../org/apache/solr/schema/PreAnalyzedField.java   |    9 +-
 .../java/org/apache/solr/schema/SchemaManager.java |    6 +-
 .../src/java/org/apache/solr/schema/TrieField.java |    2 +-
 .../apache/solr/schema/ZkIndexSchemaReader.java    |   16 +-
 .../java/org/apache/solr/search/CacheConfig.java   |    8 +-
 .../java/org/apache/solr/search/CaffeineCache.java |    3 +-
 .../solr/search/CollapsingQParserPlugin.java       |  508 +-
 .../src/java/org/apache/solr/search/DocList.java   |    3 +
 .../src/java/org/apache/solr/search/DocSet.java    |   12 +-
 .../java/org/apache/solr/search/DocSetUtil.java    |   10 +-
 .../src/java/org/apache/solr/search/DocSlice.java  |   15 +-
 .../apache/solr/search/ExtendedDismaxQParser.java  |    2 +-
 .../org/apache/solr/search/FunctionQParser.java    |   30 +-
 .../org/apache/solr/search/FunctionRangeQuery.java |    3 +
 .../solr/search/GraphTermsQParserPlugin.java       |   96 +-
 .../src/java/org/apache/solr/search/Grouping.java  |   19 +-
 .../org/apache/solr/search/JoinQParserPlugin.java  |    6 +-
 .../org/apache/solr/search/MaxScoreCollector.java  |    4 +-
 .../java/org/apache/solr/search/QueryCommand.java  |   10 +
 .../java/org/apache/solr/search/QueryContext.java  |    1 +
 .../org/apache/solr/search/QueryResultKey.java     |   15 +-
 .../java/org/apache/solr/search/QueryUtils.java    |   32 +-
 .../java/org/apache/solr/search/ScoreFilter.java   |   21 -
 .../src/java/org/apache/solr/search/SolrCache.java |    3 +-
 .../org/apache/solr/search/SolrCacheHolder.java    |  150 -
 .../org/apache/solr/search/SolrIndexSearcher.java  |  221 +-
 .../apache/solr/search/SurroundQParserPlugin.java  |    2 +-
 .../org/apache/solr/search/ValueSourceParser.java  |  637 ++-
 .../java/org/apache/solr/search/facet/AggUtil.java |    8 +-
 .../apache/solr/search/facet/AggValueSource.java   |    3 +-
 .../java/org/apache/solr/search/facet/AvgAgg.java  |   22 +-
 .../org/apache/solr/search/facet/CountAgg.java     |    6 +-
 .../org/apache/solr/search/facet/CountValsAgg.java |   12 +-
 .../org/apache/solr/search/facet/DocValuesAcc.java |  575 +--
 .../org/apache/solr/search/facet/FacetBucket.java  |   10 +-
 .../org/apache/solr/search/facet/FacetContext.java |   74 +
 .../apache/solr/search/facet/FacetDebugInfo.java   |    2 +-
 .../org/apache/solr/search/facet/FacetField.java   |   45 +-
 .../apache/solr/search/facet/FacetFieldMerger.java |    7 +-
 .../solr/search/facet/FacetFieldProcessor.java     |   31 +-
 .../search/facet/FacetFieldProcessorByArray.java   |    2 +-
 .../FacetFieldProcessorByEnumTermsStream.java      |    1 +
 .../search/facet/FacetFieldProcessorByHashDV.java  |   31 +-
 .../org/apache/solr/search/facet/FacetHeatmap.java |   12 +-
 .../org/apache/solr/search/facet/FacetModule.java  |  307 +-
 .../org/apache/solr/search/facet/FacetParser.java  |  637 +++
 .../apache/solr/search/facet/FacetProcessor.java   |   23 +-
 .../org/apache/solr/search/facet/FacetQuery.java   |    3 +-
 .../org/apache/solr/search/facet/FacetRange.java   | 1038 +---
 .../apache/solr/search/facet/FacetRangeMerger.java |    4 +-
 .../apache/solr/search/facet/FacetRangeParser.java |   76 +
 .../solr/search/facet/FacetRangeProcessor.java     | 1077 ++++
 .../org/apache/solr/search/facet/FacetRequest.java |  712 +--
 .../solr/search/facet/FacetRequestSorted.java      |   58 +
 .../search/facet/FacetRequestSortedMerger.java     |   10 +-
 .../java/org/apache/solr/search/facet/HLLAgg.java  |   27 +-
 .../org/apache/solr/search/facet/LegacyFacet.java  |    1 +
 .../org/apache/solr/search/facet/MinMaxAgg.java    |   21 +-
 .../org/apache/solr/search/facet/MissingAgg.java   |    6 +-
 .../apache/solr/search/facet/PercentileAgg.java    |    8 +-
 .../apache/solr/search/facet/RelatednessAgg.java   |   12 +-
 .../solr/search/facet/SimpleAggValueSource.java    |    1 +
 .../java/org/apache/solr/search/facet/SlotAcc.java |  688 +--
 .../org/apache/solr/search/facet/StddevAgg.java    |   20 +-
 .../java/org/apache/solr/search/facet/SumAgg.java  |   12 +-
 .../org/apache/solr/search/facet/SumsqAgg.java     |   10 +-
 .../apache/solr/search/facet/UnInvertedField.java  |   10 +-
 .../solr/search/facet/UnInvertedFieldAcc.java      |  174 +-
 .../org/apache/solr/search/facet/UniqueAgg.java    |   45 +-
 .../apache/solr/search/facet/UniqueBlockAgg.java   |   11 +-
 .../solr/search/facet/UniqueBlockFieldAgg.java     |    2 +-
 .../solr/search/facet/UniqueBlockQueryAgg.java     |    6 +-
 .../apache/solr/search/facet/UniqueSlotAcc.java    |   24 +-
 .../org/apache/solr/search/facet/VarianceAgg.java  |   20 +-
 .../search/function/CollapseScoreFunction.java     |    2 +
 .../solr/search/function/FieldNameValueSource.java |    1 +
 .../solr/search/function/FileFloatSource.java      |   37 +-
 .../solr/search/function/MultiStringFunction.java  |    1 +
 .../solr/search/function/OrdFieldSource.java       |    1 +
 .../search/function/ReverseOrdFieldSource.java     |    1 +
 .../search/function/ValueSourceRangeFilter.java    |    2 +
 .../distance/GeoDistValueSourceParser.java         |    3 +-
 .../search/function/distance/GeohashFunction.java  |    1 +
 .../distance/GeohashHaversineFunction.java         |    2 +
 .../function/distance/HaversineConstFunction.java  |    2 +
 .../function/distance/HaversineFunction.java       |    2 +
 .../function/distance/StringDistanceFunction.java  |    1 +
 .../function/distance/VectorDistanceFunction.java  |    2 +
 .../solr/search/grouping/CommandHandler.java       |   12 +-
 .../solr/search/join/BlockJoinChildQParser.java    |    7 +-
 .../solr/search/join/BlockJoinParentQParser.java   |  105 +-
 .../search/join/ChildFieldValueSourceParser.java   |    6 +-
 .../solr/search/join/GraphEdgeCollector.java       |  207 +
 .../org/apache/solr/search/join/GraphQuery.java    |    2 +-
 .../solr/search/join/GraphTermsCollector.java      |  202 -
 .../org/apache/solr/search/join/XCJFQuery.java     |    7 +-
 .../solr/search/stats/ExactSharedStatsCache.java   |    4 +-
 .../apache/solr/search/stats/ExactStatsCache.java  |   12 +-
 .../apache/solr/search/stats/LRUStatsCache.java    |    6 +-
 .../org/apache/solr/search/stats/StatsUtil.java    |   17 +-
 .../apache/solr/security/AuditLoggerPlugin.java    |   14 +-
 .../apache/solr/security/AuthenticationPlugin.java |   20 +-
 .../org/apache/solr/security/BasicAuthPlugin.java  |   19 +-
 .../org/apache/solr/security/CertAuthPlugin.java   |   51 +
 .../ExternalRoleRuleBasedAuthorizationPlugin.java  |   57 +
 .../org/apache/solr/security/HadoopAuthPlugin.java |   44 +-
 .../org/apache/solr/security/JWTAuthPlugin.java    |   52 +-
 .../org/apache/solr/security/JWTPrincipal.java     |    4 +-
 .../solr/security/JWTVerificationkeyResolver.java  |    6 +-
 .../org/apache/solr/security/KerberosFilter.java   |    6 +-
 .../org/apache/solr/security/KerberosPlugin.java   |   49 +-
 .../solr/security/MultiDestinationAuditLogger.java |    6 +-
 .../solr/security/PKIAuthenticationPlugin.java     |   41 +-
 .../solr/security/PermissionNameProvider.java      |    1 +
 .../security/RuleBasedAuthorizationPlugin.java     |  293 +-
 .../security/RuleBasedAuthorizationPluginBase.java |  339 ++
 .../security/Sha256AuthenticationProvider.java     |    2 +-
 .../solr/security/SolrLogAuditLoggerPlugin.java    |    4 +-
 .../java/org/apache/solr/servlet/HttpSolrCall.java |   17 +-
 .../apache/solr/servlet/LoadAdminUiServlet.java    |   31 +-
 .../org/apache/solr/servlet/ResponseUtils.java     |    2 +-
 .../apache/solr/servlet/SolrDispatchFilter.java    |  124 +-
 .../apache/solr/servlet/SolrRequestParsers.java    |  149 +-
 .../solr/spelling/DirectSolrSpellChecker.java      |    4 +-
 .../apache/solr/spelling/SpellCheckCollator.java   |   16 +-
 .../DocumentExpressionDictionaryFactory.java       |   20 +-
 .../solr/spelling/suggest/SolrSuggester.java       |   26 +-
 .../apache/solr/spelling/suggest/Suggester.java    |   12 +-
 .../suggest/jaspell/JaspellLookupFactory.java      |    2 +-
 .../org/apache/solr/store/hdfs/HdfsDirectory.java  |    4 +-
 .../solr/store/hdfs/HdfsLocalityReporter.java      |    6 +-
 .../org/apache/solr/update/CdcrTransactionLog.java |    6 +-
 .../java/org/apache/solr/update/CdcrUpdateLog.java |    2 +-
 .../java/org/apache/solr/update/CommitTracker.java |   24 +-
 .../apache/solr/update/DefaultSolrCoreState.java   |    4 +-
 .../apache/solr/update/DirectUpdateHandler2.java   |   36 +-
 .../org/apache/solr/update/HdfsTransactionLog.java |    6 +-
 .../java/org/apache/solr/update/HdfsUpdateLog.java |   18 +-
 .../org/apache/solr/update/LoggingInfoStream.java  |    4 +-
 .../src/java/org/apache/solr/update/PeerSync.java  |   67 +-
 .../org/apache/solr/update/PeerSyncWithLeader.java |   27 +-
 .../org/apache/solr/update/SolrCmdDistributor.java |   11 +-
 .../org/apache/solr/update/SolrIndexConfig.java    |    2 +-
 .../org/apache/solr/update/SolrIndexSplitter.java  |   27 +-
 .../org/apache/solr/update/SolrIndexWriter.java    |   12 +-
 .../apache/solr/update/StreamingSolrClients.java   |    2 +-
 .../org/apache/solr/update/TransactionLog.java     |  169 +-
 .../java/org/apache/solr/update/UpdateHandler.java |    8 +-
 .../src/java/org/apache/solr/update/UpdateLog.java |   83 +-
 .../org/apache/solr/update/UpdateShardHandler.java |   16 +-
 .../AddSchemaFieldsUpdateProcessorFactory.java     |    2 +-
 ...llValuesOrNoneFieldMutatingUpdateProcessor.java |    6 +-
 .../processor/AtomicUpdateProcessorFactory.java    |    4 +-
 .../CloneFieldUpdateProcessorFactory.java          |    6 +-
 .../processor/DistributedUpdateProcessor.java      |   83 +-
 .../processor/DistributedZkUpdateProcessor.java    |   54 +-
 .../DocBasedVersionConstraintsProcessor.java       |    2 +-
 ...DocBasedVersionConstraintsProcessorFactory.java |    5 +-
 .../DocExpirationUpdateProcessorFactory.java       |   14 +-
 .../FieldValueMutatingUpdateProcessor.java         |   12 +-
 .../processor/LogUpdateProcessorFactory.java       |   29 +-
 .../ParseDateFieldUpdateProcessorFactory.java      |    6 +-
 .../ParseDoubleFieldUpdateProcessorFactory.java    |    6 +-
 .../ParseFloatFieldUpdateProcessorFactory.java     |    6 +-
 .../ParseIntFieldUpdateProcessorFactory.java       |    6 +-
 .../ParseLongFieldUpdateProcessorFactory.java      |    6 +-
 .../update/processor/RegexpBoostProcessor.java     |   12 +-
 .../processor/RunUpdateProcessorFactory.java       |  123 +-
 .../update/processor/TolerantUpdateProcessor.java  |    2 +-
 .../update/processor/URLClassifyProcessor.java     |    4 +-
 .../processor/UpdateRequestProcessorChain.java     |    8 +-
 .../apache/solr/util/DefaultSolrThreadFactory.java |   49 -
 .../src/java/org/apache/solr/util/DynamicMap.java  |   55 +
 .../src/java/org/apache/solr/util/ExportTool.java  |    5 +-
 .../src/java/org/apache/solr/util/FSHDFSUtils.java |   14 +-
 .../java/org/apache/solr/util/FloatConsumer.java   |   39 +
 .../org/apache/solr/util/IntFloatDynamicMap.java   |  118 +
 .../org/apache/solr/util/IntIntDynamicMap.java     |  120 +
 .../org/apache/solr/util/IntLongDynamicMap.java    |  120 +
 .../src/java/org/apache/solr/util/PackageTool.java |    6 +-
 .../src/java/org/apache/solr/util/SolrCLI.java     | 1325 +++--
 .../apache/solr/util/SolrFileCleaningTracker.java  |  147 -
 .../java/org/apache/solr/util/SolrLogPostTool.java |  256 +-
 .../java/org/apache/solr/util/SolrPluginUtils.java |    2 +-
 .../org/apache/solr/util/StartupLoggingUtils.java  |    3 +-
 .../java/org/apache/solr/util/TestInjection.java   |    2 +-
 .../SSLCredentialProviderFactory.java              |    4 +-
 .../providers/HadoopSSLCredentialProvider.java     |    2 +-
 .../solr/util/plugin/AbstractPluginLoader.java     |    8 +-
 .../org/apache/solr/util/stats/MetricUtils.java    |    4 +-
 .../apache/solr/util/xslt/TransformerProvider.java |   10 +-
 solr/core/src/resources/ImplicitPlugins.json       |   13 +-
 .../conf/solrconfig-cache-enable-disable.xml       |   10 +
 solr/core/src/test-files/solr/solr-jmxreporter.xml |    1 -
 .../core/src/test-files/solr/solr-solrreporter.xml |    5 +-
 .../test-files/solr/solr-trackingshardhandler.xml  |    1 -
 solr/core/src/test-files/solr/solr.xml             |    1 -
 .../src/test/org/apache/hadoop/fs/FileUtil.java    |   26 +-
 .../datanode/fsdataset/impl/BlockPoolSlice.java    |   50 +-
 .../test/org/apache/hadoop/http/HttpServer2.java   |   55 +-
 .../test/org/apache/hadoop/util/DiskChecker.java   |    4 +-
 .../org/apache/solr/BasicFunctionalityTest.java    |    2 +-
 .../test/org/apache/solr/TestCrossCoreJoin.java    |    4 +-
 .../org/apache/solr/TestDistributedSearch.java     |   36 +-
 .../test/org/apache/solr/TestGroupingSearch.java   |  200 +-
 solr/core/src/test/org/apache/solr/TestJoin.java   |   26 +-
 .../test/org/apache/solr/TestRandomDVFaceting.java |    7 +-
 .../test/org/apache/solr/TestRandomFaceting.java   |    7 +-
 .../TestEmbeddedSolrServerAdminHandler.java        |    9 +-
 .../TestEmbeddedSolrServerConstructors.java        |    4 +-
 .../cloud/AssignBackwardCompatibilityTest.java     |    4 +-
 .../apache/solr/cloud/BasicDistributedZk2Test.java |    3 +-
 .../apache/solr/cloud/BasicDistributedZkTest.java  |    4 +-
 .../solr/cloud/ChaosMonkeyNothingIsSafeTest.java   |    5 +-
 ...aosMonkeyNothingIsSafeWithPullReplicasTest.java |    8 +-
 .../solr/cloud/ChaosMonkeySafeLeaderTest.java      |    4 +-
 .../ChaosMonkeySafeLeaderWithPullReplicasTest.java |   13 +-
 .../solr/cloud/ChaosMonkeyShardSplitTest.java      |   22 +-
 .../org/apache/solr/cloud/CollectionPropsTest.java |    4 +-
 .../apache/solr/cloud/ConnectionManagerTest.java   |    4 +-
 .../solr/cloud/DeleteInactiveReplicaTest.java      |    4 +-
 .../test/org/apache/solr/cloud/DeleteNodeTest.java |    6 +-
 .../org/apache/solr/cloud/DeleteReplicaTest.java   |    4 +-
 .../solr/cloud/DistribJoinFromCollectionTest.java  |    8 +-
 .../apache/solr/cloud/DistributedQueueTest.java    |    4 +-
 .../solr/cloud/DistributedVersionInfoTest.java     |    8 +-
 .../org/apache/solr/cloud/ForceLeaderTest.java     |   24 +-
 .../solr/cloud/FullSolrCloudDistribCmdsTest.java   |    4 +-
 .../cloud/FullThrottleStoppableIndexingThread.java |    2 +-
 .../apache/solr/cloud/HealthCheckHandlerTest.java  |  179 -
 .../solr/cloud/HttpPartitionOnCommitTest.java      |   33 +-
 .../org/apache/solr/cloud/HttpPartitionTest.java   |   32 +-
 .../apache/solr/cloud/KerberosTestServices.java    |    2 +-
 .../org/apache/solr/cloud/LeaderElectionTest.java  |    8 +-
 .../cloud/LeaderFailoverAfterPartitionTest.java    |    4 +-
 .../cloud/LeaderFailureAfterFreshStartTest.java    |    8 +-
 .../apache/solr/cloud/LeaderTragicEventTest.java   |   17 +-
 .../solr/cloud/LeaderVoteWaitTimeoutTest.java      |    4 +-
 .../org/apache/solr/cloud/MigrateRouteKeyTest.java |    9 +-
 .../apache/solr/cloud/MockSimpleZkController.java  |    5 +-
 .../org/apache/solr/cloud/MoveReplicaTest.java     |   18 +-
 .../apache/solr/cloud/MultiThreadedOCPTest.java    |    3 -
 .../OutOfBoxZkACLAndCredentialsProvidersTest.java  |   12 +-
 ...OverriddenZkACLAndCredentialsProvidersTest.java |   10 +-
 .../OverseerCollectionConfigSetProcessorTest.java  |    4 +-
 .../org/apache/solr/cloud/OverseerRolesTest.java   |   10 +-
 .../org/apache/solr/cloud/OverseerStatusTest.java  |    4 +-
 .../test/org/apache/solr/cloud/OverseerTest.java   |   36 +-
 .../apache/solr/cloud/PeerSyncReplicationTest.java |   14 +-
 .../apache/solr/cloud/ReplaceNodeNoTargetTest.java |   16 +-
 .../org/apache/solr/cloud/ReplaceNodeTest.java     |   10 +-
 .../apache/solr/cloud/ReplicationFactorTest.java   |   13 +-
 .../solr/cloud/RestartWhileUpdatingTest.java       |    5 +-
 .../org/apache/solr/cloud/RollingRestartTest.java  |   10 +-
 .../org/apache/solr/cloud/SSLMigrationTest.java    |    2 +-
 .../apache/solr/cloud/SaslZkACLProviderTest.java   |   10 +-
 .../cloud/SharedFSAutoReplicaFailoverTest.java     |    4 +-
 .../apache/solr/cloud/SolrCloudExampleTest.java    |   21 +-
 .../org/apache/solr/cloud/SolrXmlInZkTest.java     |    8 +-
 .../test/org/apache/solr/cloud/SplitShardTest.java |   12 +-
 .../test/org/apache/solr/cloud/SyncSliceTest.java  |    6 +-
 .../solr/cloud/SystemCollectionCompatTest.java     |   14 +-
 .../solr/cloud/TestAuthenticationFramework.java    |   10 +-
 .../apache/solr/cloud/TestCloudConsistency.java    |    4 +-
 .../org/apache/solr/cloud/TestCloudPivotFacet.java |    7 +-
 .../solr/cloud/TestCloudSearcherWarming.java       |   18 +-
 .../org/apache/solr/cloud/TestConfigSetsAPI.java   |   14 +-
 .../solr/cloud/TestConfigSetsAPIZkFailure.java     |    2 +-
 .../cloud/TestDynamicFieldNamesIndexCorrectly.java |    7 +-
 .../solr/cloud/TestLeaderElectionZkExpiry.java     |   13 +-
 .../solr/cloud/TestMiniSolrCloudClusterSSL.java    |   16 +-
 .../solr/cloud/TestOnReconnectListenerSupport.java |    3 +-
 .../org/apache/solr/cloud/TestPullReplica.java     |    8 +-
 .../solr/cloud/TestPullReplicaErrorHandling.java   |    4 +-
 .../solr/cloud/TestRandomRequestDistribution.java  |   13 +-
 .../cloud/TestSolrCloudWithDelegationTokens.java   |   38 +-
 .../solr/cloud/TestSolrCloudWithKerberosAlt.java   |    4 +-
 .../TestSolrCloudWithSecureImpersonation.java      |   17 +-
 .../cloud/TestStressCloudBlindAtomicUpdates.java   |    7 +-
 .../solr/cloud/TestStressInPlaceUpdates.java       |   19 +-
 .../org/apache/solr/cloud/TestStressLiveNodes.java |   12 +-
 .../solr/cloud/TestTlogReplayVsRecovery.java       |    4 +-
 .../org/apache/solr/cloud/TestTlogReplica.java     |    2 +-
 .../TestTolerantUpdateProcessorRandomCloud.java    |   10 +-
 .../org/apache/solr/cloud/TestUtilizeNode.java     |   29 +-
 .../cloud/TestWaitForStateWithJettyShutdowns.java  |   10 +-
 .../org/apache/solr/cloud/TestWithCollection.java  |   16 +-
 .../test/org/apache/solr/cloud/TestZkChroot.java   |   26 +-
 .../cloud/TlogReplayBufferedWhileIndexingTest.java |    5 +-
 .../apache/solr/cloud/UnloadDistributedZkTest.java |    6 +-
 .../VMParamsZkACLAndCredentialsProvidersTest.java  |   10 +-
 .../src/test/org/apache/solr/cloud/ZkCLITest.java  |   10 +-
 .../org/apache/solr/cloud/ZkControllerTest.java    |   39 +-
 .../org/apache/solr/cloud/ZkNodePropsTest.java     |    4 +-
 .../AbstractCloudBackupRestoreTestCase.java        |   10 +-
 .../api/collections/CollectionReloadTest.java      |    2 +-
 .../CollectionsAPIAsyncDistributedZkTest.java      |   12 +-
 .../CollectionsAPIDistributedZkTest.java           |   19 +-
 .../ConcurrentCreateCollectionTest.java            |   18 +-
 .../solr/cloud/api/collections/ShardSplitTest.java |   86 +-
 .../cloud/api/collections/SplitByPrefixTest.java   |    2 +-
 .../cloud/api/collections/TestCollectionAPI.java   |   24 +-
 .../TestCollectionsAPIViaSolrCloudCluster.java     |    4 +-
 .../AutoAddReplicasIntegrationTest.java            |  106 +-
 .../autoscaling/AutoAddReplicasPlanActionTest.java |   12 +-
 .../cloud/autoscaling/AutoScalingHandlerTest.java  |   42 +-
 .../cloud/autoscaling/ComputePlanActionTest.java   |  100 +-
 .../IndexSizeTriggerMixedBoundsTest.java           |    2 +-
 .../IndexSizeTriggerSizeEstimationTest.java        |    2 +-
 .../cloud/autoscaling/IndexSizeTriggerTest.java    |   25 +-
 .../NodeAddedTriggerIntegrationTest.java           |    4 +-
 .../NodeLostTriggerIntegrationTest.java            |    4 +-
 .../ScheduledMaintenanceTriggerTest.java           |    4 +-
 .../SearchRateTriggerIntegrationTest.java          |    6 +-
 .../cloud/autoscaling/SystemLogListenerTest.java   |    4 +-
 .../solr/cloud/autoscaling/TestPolicyCloud.java    |   18 +-
 .../cloud/autoscaling/TriggerIntegrationTest.java  |   25 +-
 .../TriggerSetPropertiesIntegrationTest.java       |   42 +-
 .../autoscaling/sim/SimSolrCloudTestCase.java      |    4 +-
 .../sim/TestSimClusterStateProvider.java           |    2 +-
 .../autoscaling/sim/TestSimComputePlanAction.java  |   34 +-
 .../sim/TestSimDistribStateManager.java            |    4 +-
 .../autoscaling/sim/TestSimDistributedQueue.java   |    4 +-
 .../autoscaling/sim/TestSimExecutePlanAction.java  |   28 +-
 .../autoscaling/sim/TestSimExtremeIndexing.java    |    4 +-
 .../cloud/autoscaling/sim/TestSimLargeCluster.java |   92 +-
 .../cloud/autoscaling/sim/TestSimPolicyCloud.java  |    3 +
 .../cloud/autoscaling/sim/TestSimScenario.java     |    3 +
 .../autoscaling/sim/TestSimTriggerIntegration.java |   16 +-
 .../autoscaling/sim/TestSnapshotCloudManager.java  |   17 +-
 .../solr/cloud/cdcr/BaseCdcrDistributedZkTest.java |    8 +-
 .../solr/cloud/cdcr/CdcrBidirectionalTest.java     |   32 +-
 .../apache/solr/cloud/cdcr/CdcrBootstrapTest.java  |   30 +-
 .../cloud/cdcr/CdcrReplicationHandlerTest.java     |    8 +-
 .../org/apache/solr/cloud/cdcr/CdcrTestsUtil.java  |    6 +-
 .../cloud/cdcr/CdcrVersionReplicationTest.java     |    2 +-
 .../apache/solr/cloud/hdfs/HdfsSyncSliceTest.java  |    2 -
 .../test/org/apache/solr/cloud/rule/RulesTest.java |    6 +-
 .../solr/common/cloud/ZkDynamicConfigTest.java     |   62 +
 .../test/org/apache/solr/core/CoreSorterTest.java  |  306 +-
 .../org/apache/solr/core/DirectoryFactoryTest.java |    2 +-
 .../org/apache/solr/core/QueryResultKeyTest.java   |   16 +-
 .../org/apache/solr/core/ResourceLoaderTest.java   |    9 +-
 .../solr/core/SolrCoreCheckLockOnStartupTest.java  |    4 +-
 .../test/org/apache/solr/core/SolrCoreTest.java    |    6 +-
 .../src/test/org/apache/solr/core/TestConfig.java  |   38 +-
 .../test/org/apache/solr/core/TestConfigSets.java  |   30 +-
 .../org/apache/solr/core/TestCoreContainer.java    |   19 +-
 .../org/apache/solr/core/TestCoreDiscovery.java    |   11 +-
 .../org/apache/solr/core/TestJmxIntegration.java   |   12 +-
 .../test/org/apache/solr/core/TestLazyCores.java   |   74 +-
 .../src/test/org/apache/solr/core/TestSolrXml.java |  120 +-
 .../repository/HdfsBackupRepositoryTest.java       |   94 +-
 .../solr/filestore/TestDistribPackageStore.java    |    3 +-
 .../org/apache/solr/handler/TestConfigReload.java  |    8 +-
 .../solr/handler/TestReplicationHandler.java       |   13 +-
 .../TestReplicationHandlerDiskOverFlow.java        |    4 +-
 .../org/apache/solr/handler/TestSQLHandler.java    |    3 +-
 .../handler/TestSolrConfigHandlerConcurrent.java   |    2 +-
 .../apache/solr/handler/V2ApiIntegrationTest.java  |    4 +-
 .../admin/AutoscalingHistoryHandlerTest.java       |   24 +-
 .../solr/handler/admin/HealthCheckHandlerTest.java |  236 +
 .../solr/handler/admin/LukeRequestHandlerTest.java |   15 +
 .../admin/SegmentsInfoRequestHandlerTest.java      |   16 +-
 .../solr/handler/admin/TestApiFramework.java       |   27 +-
 .../solr/handler/admin/ZookeeperReadAPITest.java   |  101 +
 .../handler/admin/ZookeeperStatusHandlerTest.java  |   55 +-
 .../component/DistributedExpandComponentTest.java  |   49 +
 .../component/QueryElevationComponentTest.java     |    4 +-
 .../component/ResourceSharingTestComponent.java    |    4 +-
 .../solr/handler/component/StatsComponentTest.java |  110 +-
 .../handler/component/TestExpandComponent.java     |   96 +
 .../org/apache/solr/handler/tagger/TaggerTest.java |   22 +-
 .../apache/solr/handler/tagger/TaggerTestCase.java |    6 +-
 .../tagger/WordLengthTaggingFilterFactory.java     |    5 +
 .../org/apache/solr/metrics/JvmMetricsTest.java    |    8 +-
 .../org/apache/solr/metrics/MetricsConfigTest.java |   13 +-
 .../apache/solr/metrics/SolrMetricManagerTest.java |    9 +-
 .../solr/metrics/SolrMetricsIntegrationTest.java   |    5 +-
 .../reporters/SolrGraphiteReporterTest.java        |    3 +-
 .../reporters/SolrJmxReporterCloudTest.java        |    6 +-
 .../metrics/reporters/SolrSlf4jReporterTest.java   |    3 +-
 .../reporters/solr/SolrCloudReportersTest.java     |    4 +-
 .../org/apache/solr/request/SimpleFacetsTest.java  |    2 +-
 .../test/org/apache/solr/request/TestFaceting.java |   25 +
 .../org/apache/solr/request/TestWriterPerf.java    |    5 +-
 .../org/apache/solr/response/JSONWriterTest.java   |    8 +-
 .../response/TestPHPSerializedResponseWriter.java  |    2 +-
 .../org/apache/solr/response/TestPushWriter.java   |    4 +-
 .../transform/TestChildDocTransformer.java         |   28 +-
 .../apache/solr/schema/ChangedSchemaMergeTest.java |    6 +-
 .../apache/solr/schema/CurrencyFieldTypeTest.java  |   94 +-
 .../test/org/apache/solr/schema/DateFieldTest.java |    2 +-
 .../test/org/apache/solr/schema/DocValuesTest.java |    4 +-
 .../apache/solr/schema/PrimitiveFieldTypeTest.java |    2 +-
 .../apache/solr/schema/SchemaApiFailureTest.java   |    4 +-
 .../solr/schema/TestBulkSchemaConcurrent.java      |    2 +-
 .../apache/solr/schema/TestManagedSchemaAPI.java   |    2 +-
 .../solr/schema/TestManagedSchemaThreadSafety.java |    4 +-
 .../apache/solr/schema/TestSortableTextField.java  |    6 +-
 .../apache/solr/schema/WrappedIntPointField.java   |   20 +-
 .../solr/search/CurrencyRangeFacetCloudTest.java   |   64 +-
 .../apache/solr/search/SolrIndexSearcherTest.java  |  330 ++
 .../solr/search/TestAddFieldRealTimeGet.java       |    8 +-
 .../solr/search/TestCollapseQParserPlugin.java     |   92 +
 .../test/org/apache/solr/search/TestDocSet.java    |    3 +-
 .../solr/search/TestExtendedDismaxParser.java      |   41 +-
 .../org/apache/solr/search/TestRealTimeGet.java    |    6 +-
 .../org/apache/solr/search/TestReloadDeadlock.java |    2 +-
 .../src/test/org/apache/solr/search/TestSort.java  |    5 +-
 .../org/apache/solr/search/TestStressReorder.java  |    4 +-
 .../apache/solr/search/TestStressUserVersions.java |    6 +-
 .../org/apache/solr/search/facet/DebugAgg.java     |   12 +-
 ...stributedFacetSimpleRefinementLongTailTest.java |   38 +-
 .../solr/search/facet/RangeFacetCloudTest.java     |   14 +-
 .../solr/search/facet/TestCloudJSONFacetSKG.java   |  278 +-
 .../search/facet/TestCloudJSONFacetSKGEquiv.java   |  989 ++++
 .../solr/search/facet/TestJsonFacetErrors.java     |  400 ++
 .../solr/search/facet/TestJsonFacetRefinement.java |    2 +-
 .../apache/solr/search/facet/TestJsonFacets.java   |  717 +--
 .../facet/TestJsonFacetsWithNestedObjects.java     |   12 +-
 .../solr/search/facet/TestJsonRangeFacets.java     |  435 ++
 .../solr/search/function/TestFunctionQuery.java    |   32 +
 .../solr/search/join/TestScoreJoinQPNoScore.java   |   27 +-
 .../solr/search/join/TestScoreJoinQPScore.java     |   18 +-
 .../join/another/BJQFilterAccessibleTest.java      |    2 +-
 .../apache/solr/search/json/TestJsonRequest.java   |   39 +-
 .../solr/security/AuditLoggerIntegrationTest.java  |   10 +-
 .../BaseTestRuleBasedAuthorizationPlugin.java      |  600 +++
 .../solr/security/BasicAuthIntegrationTest.java    |   17 +-
 .../solr/security/CallbackAuditLoggerPlugin.java   |    4 +-
 .../apache/solr/security/CertAuthPluginTest.java   |   79 +
 .../apache/solr/security/JWTAuthPluginTest.java    |   22 +-
 .../solr/security/MockAuditLoggerPlugin.java       |    4 +-
 .../solr/security/MockAuthenticationPlugin.java    |   25 +-
 .../solr/security/MockAuthorizationPlugin.java     |    2 +-
 .../solr/security/PrincipalWithUserRoles.java      |   91 +
 .../solr/security/TestAuthorizationFramework.java  |    3 +-
 ...stExternalRoleRuleBasedAuthorizationPlugin.java |   78 +
 .../security/TestRuleBasedAuthorizationPlugin.java |  585 ---
 .../hadoop/TestDelegationWithHadoopAuth.java       |    9 +-
 .../hadoop/TestImpersonationWithHadoopAuth.java    |   13 +-
 .../hadoop/TestSolrCloudWithHadoopAuthPlugin.java  |    2 +-
 .../SpellCheckCollatorWithCollapseTest.java        |   37 +-
 .../suggest/RandomTestDictionaryFactory.java       |    6 +-
 .../org/apache/solr/update/AddBlockUpdateTest.java |    6 +-
 .../solr/update/DirectUpdateHandlerTest.java       |   12 +-
 .../apache/solr/update/SolrIndexConfigTest.java    |   12 +-
 .../solr/update/TestInPlaceUpdatesDistrib.java     |   73 +-
 .../solr/update/TestIndexingPerformance.java       |    8 +-
 .../CategoryRoutedAliasUpdateProcessorTest.java    |    6 +-
 .../DimensionalRoutedAliasUpdateProcessorTest.java |    6 +-
 .../processor/RoutedAliasUpdateProcessorTest.java  |    4 +-
 .../processor/TestDocBasedVersionConstraints.java  |    4 +-
 .../TimeRoutedAliasUpdateProcessorTest.java        |    6 +-
 .../UpdateRequestProcessorFactoryTest.java         |    2 +-
 .../test/org/apache/solr/util/DynamicMapsTest.java |   90 +
 .../org/apache/solr/util/OrderedExecutorTest.java  |   28 +-
 .../org/apache/solr/util/SolrLogPostToolTest.java  |   44 +-
 .../apache/solr/util/TestSolrCLIRunExample.java    |   16 +-
 solr/example/README.md                             |   95 +
 solr/example/README.txt                            |   78 -
 solr/example/build.gradle                          |    4 +-
 solr/example/example-DIH/README.md                 |   53 +
 solr/example/example-DIH/README.txt                |   49 -
 solr/example/files/README.md                       |  167 +
 solr/example/files/README.txt                      |  152 -
 solr/example/films/README.md                       |  161 +
 solr/example/films/README.txt                      |  138 -
 solr/licenses/caffeine-2.8.0.jar.sha1              |    1 -
 solr/licenses/caffeine-2.8.4.jar.sha1              |    1 +
 solr/licenses/commons-cli-1.2.jar.sha1             |    1 -
 solr/licenses/commons-cli-1.4.jar.sha1             |    1 +
 solr/licenses/commons-fileupload-1.3.3.jar.sha1    |    1 -
 solr/licenses/commons-fileupload-LICENSE-ASL.txt   |  202 -
 solr/licenses/commons-fileupload-NOTICE.txt        |    5 -
 .../http2-client-9.4.24.v20191120.jar.sha1         |    1 -
 .../http2-client-9.4.27.v20200227.jar.sha1         |    1 +
 .../http2-common-9.4.24.v20191120.jar.sha1         |    1 -
 .../http2-common-9.4.27.v20200227.jar.sha1         |    1 +
 .../licenses/http2-hpack-9.4.24.v20191120.jar.sha1 |    1 -
 .../licenses/http2-hpack-9.4.27.v20200227.jar.sha1 |    1 +
 ...http-client-transport-9.4.24.v20191120.jar.sha1 |    1 -
 ...http-client-transport-9.4.27.v20200227.jar.sha1 |    1 +
 .../http2-server-9.4.24.v20191120.jar.sha1         |    1 -
 .../http2-server-9.4.27.v20200227.jar.sha1         |    1 +
 .../jetty-alpn-client-9.4.24.v20191120.jar.sha1    |    1 -
 .../jetty-alpn-client-9.4.27.v20200227.jar.sha1    |    1 +
 ...etty-alpn-java-client-9.4.24.v20191120.jar.sha1 |    1 -
 ...etty-alpn-java-client-9.4.27.v20200227.jar.sha1 |    1 +
 ...etty-alpn-java-server-9.4.24.v20191120.jar.sha1 |    1 -
 ...etty-alpn-java-server-9.4.27.v20200227.jar.sha1 |    1 +
 .../jetty-alpn-server-9.4.24.v20191120.jar.sha1    |    1 -
 .../jetty-alpn-server-9.4.27.v20200227.jar.sha1    |    1 +
 .../jetty-client-9.4.24.v20191120.jar.sha1         |    1 -
 .../jetty-client-9.4.27.v20200227.jar.sha1         |    1 +
 .../jetty-continuation-9.4.24.v20191120.jar.sha1   |    1 -
 .../jetty-continuation-9.4.27.v20200227.jar.sha1   |    1 +
 .../jetty-deploy-9.4.24.v20191120.jar.sha1         |    1 -
 .../jetty-deploy-9.4.27.v20200227.jar.sha1         |    1 +
 solr/licenses/jetty-http-9.4.24.v20191120.jar.sha1 |    1 -
 solr/licenses/jetty-http-9.4.27.v20200227.jar.sha1 |    1 +
 solr/licenses/jetty-io-9.4.24.v20191120.jar.sha1   |    1 -
 solr/licenses/jetty-io-9.4.27.v20200227.jar.sha1   |    1 +
 solr/licenses/jetty-jmx-9.4.24.v20191120.jar.sha1  |    1 -
 solr/licenses/jetty-jmx-9.4.27.v20200227.jar.sha1  |    1 +
 .../jetty-rewrite-9.4.24.v20191120.jar.sha1        |    1 -
 .../jetty-rewrite-9.4.27.v20200227.jar.sha1        |    1 +
 .../jetty-security-9.4.24.v20191120.jar.sha1       |    1 -
 .../jetty-security-9.4.27.v20200227.jar.sha1       |    1 +
 .../jetty-server-9.4.24.v20191120.jar.sha1         |    1 -
 .../jetty-server-9.4.27.v20200227.jar.sha1         |    1 +
 .../jetty-servlet-9.4.24.v20191120.jar.sha1        |    1 -
 .../jetty-servlet-9.4.27.v20200227.jar.sha1        |    1 +
 .../jetty-servlets-9.4.24.v20191120.jar.sha1       |    1 -
 .../jetty-servlets-9.4.27.v20200227.jar.sha1       |    1 +
 .../jetty-start-9.4.24.v20191120-shaded.jar.sha1   |    1 -
 .../jetty-start-9.4.27.v20200227-shaded.jar.sha1   |    1 +
 solr/licenses/jetty-util-9.4.24.v20191120.jar.sha1 |    1 -
 solr/licenses/jetty-util-9.4.27.v20200227.jar.sha1 |    1 +
 .../jetty-webapp-9.4.24.v20191120.jar.sha1         |    1 -
 .../jetty-webapp-9.4.27.v20200227.jar.sha1         |    1 +
 solr/licenses/jetty-xml-9.4.24.v20191120.jar.sha1  |    1 -
 solr/licenses/jetty-xml-9.4.27.v20200227.jar.sha1  |    1 +
 solr/licenses/log4j-1.2-api-2.11.2.jar.sha1        |    1 -
 solr/licenses/log4j-1.2-api-2.13.2.jar.sha1        |    1 +
 solr/licenses/log4j-api-2.11.2.jar.sha1            |    1 -
 solr/licenses/log4j-api-2.13.2.jar.sha1            |    1 +
 solr/licenses/log4j-core-2.11.2.jar.sha1           |    1 -
 solr/licenses/log4j-core-2.13.2.jar.sha1           |    1 +
 solr/licenses/log4j-slf4j-impl-2.11.2.jar.sha1     |    1 -
 solr/licenses/log4j-slf4j-impl-2.13.2.jar.sha1     |    1 +
 solr/licenses/log4j-web-2.11.2.jar.sha1            |    1 -
 solr/licenses/log4j-web-2.13.2.jar.sha1            |    1 +
 solr/licenses/metrics-core-4.1.2.jar.sha1          |    1 -
 solr/licenses/metrics-core-4.1.5.jar.sha1          |    1 +
 solr/licenses/metrics-graphite-4.1.2.jar.sha1      |    1 -
 solr/licenses/metrics-graphite-4.1.5.jar.sha1      |    1 +
 solr/licenses/metrics-jetty9-4.1.2.jar.sha1        |    1 -
 solr/licenses/metrics-jetty9-4.1.5.jar.sha1        |    1 +
 solr/licenses/metrics-jmx-4.1.2.jar.sha1           |    1 -
 solr/licenses/metrics-jmx-4.1.5.jar.sha1           |    1 +
 solr/licenses/metrics-jvm-4.1.2.jar.sha1           |    1 -
 solr/licenses/metrics-jvm-4.1.5.jar.sha1           |    1 +
 .../morfologik-ukrainian-search-3.9.0.jar.sha1     |    1 -
 .../morfologik-ukrainian-search-4.9.1.jar.sha1     |    1 +
 solr/licenses/netty-buffer-4.1.29.Final.jar.sha1   |    1 -
 solr/licenses/netty-buffer-4.1.47.Final.jar.sha1   |    1 +
 solr/licenses/netty-codec-4.1.29.Final.jar.sha1    |    1 -
 solr/licenses/netty-codec-4.1.47.Final.jar.sha1    |    1 +
 solr/licenses/netty-common-4.1.29.Final.jar.sha1   |    1 -
 solr/licenses/netty-common-4.1.47.Final.jar.sha1   |    1 +
 solr/licenses/netty-handler-4.1.29.Final.jar.sha1  |    1 -
 solr/licenses/netty-handler-4.1.47.Final.jar.sha1  |    1 +
 solr/licenses/netty-resolver-4.1.29.Final.jar.sha1 |    1 -
 solr/licenses/netty-resolver-4.1.47.Final.jar.sha1 |    1 +
 .../licenses/netty-transport-4.1.29.Final.jar.sha1 |    1 -
 .../licenses/netty-transport-4.1.47.Final.jar.sha1 |    1 +
 ...ty-transport-native-epoll-4.1.29.Final.jar.sha1 |    1 -
 ...ty-transport-native-epoll-4.1.47.Final.jar.sha1 |    1 +
 ...nsport-native-unix-common-4.1.29.Final.jar.sha1 |    1 -
 ...nsport-native-unix-common-4.1.47.Final.jar.sha1 |    1 +
 solr/licenses/start.jar.sha1                       |    2 +-
 solr/licenses/tika-core-1.23.jar.sha1              |    1 -
 solr/licenses/tika-core-1.24.jar.sha1              |    1 +
 solr/licenses/tika-java7-1.23.jar.sha1             |    1 -
 solr/licenses/tika-java7-1.24.jar.sha1             |    1 +
 solr/licenses/tika-parsers-1.23.jar.sha1           |    1 -
 solr/licenses/tika-parsers-1.24.jar.sha1           |    1 +
 solr/licenses/tika-xmp-1.23.jar.sha1               |    1 -
 solr/licenses/tika-xmp-1.24.jar.sha1               |    1 +
 solr/licenses/zookeeper-3.5.5.jar.sha1             |    1 -
 solr/licenses/zookeeper-3.5.7.jar.sha1             |    1 +
 solr/licenses/zookeeper-jute-3.5.5.jar.sha1        |    1 -
 solr/licenses/zookeeper-jute-3.5.7.jar.sha1        |    1 +
 solr/packaging/build.gradle                        |   14 +-
 solr/server/README.md                              |  114 +
 solr/server/README.txt                             |  109 -
 solr/server/build.gradle                           |    4 +-
 solr/server/solr/README.md                         |   79 +
 solr/server/solr/README.txt                        |   77 -
 .../clustering/carrot2/{README.txt => README.md}   |    0
 .../conf/velocity/README.md                        |  116 +
 .../conf/velocity/README.txt                       |  101 -
 ..._REQUIREMENTS.mdtext => SYSTEM_REQUIREMENTS.md} |    0
 solr/site/index.template.md                        |   37 +
 solr/site/online-link.template.md                  |   19 +
 solr/solr-ref-guide/build.gradle                   |    5 +-
 solr/solr-ref-guide/build.xml                      |    1 -
 solr/solr-ref-guide/src/_config.yml.template       |    2 -
 solr/solr-ref-guide/src/_includes/head.html        |   26 +-
 solr/solr-ref-guide/src/_includes/head_print.html  |   29 -
 solr/solr-ref-guide/src/_includes/sidebar.html     |   35 +-
 solr/solr-ref-guide/src/_includes/taglogic.html    |   22 -
 solr/solr-ref-guide/src/_includes/toc.html         |    9 -
 solr/solr-ref-guide/src/_includes/topnav.html      |   39 +-
 solr/solr-ref-guide/src/_layouts/default.html      |   33 +-
 .../solr-ref-guide/src/_layouts/default_print.html |   25 -
 solr/solr-ref-guide/src/_layouts/home.html         |   27 +-
 solr/solr-ref-guide/src/_layouts/page.html         |   29 +-
 solr/solr-ref-guide/src/_layouts/page_print.html   |   15 -
 solr/solr-ref-guide/src/_templates/open.html.slim  |    2 +-
 .../src/analytics-expression-sources.adoc          |    1 -
 .../src/analytics-mapping-functions.adoc           |    1 -
 .../src/analytics-reduction-functions.adoc         |    3 +-
 solr/solr-ref-guide/src/analytics.adoc             |    3 +-
 .../authentication-and-authorization-plugins.adoc  |    2 +-
 solr/solr-ref-guide/src/cdcr-config.adoc           |    1 +
 .../src/cert-authentication-plugin.adoc            |   61 +
 .../src/cluster-node-management.adoc               |   22 +-
 .../src/collapse-and-expand-results.adoc           |   21 +
 solr/solr-ref-guide/src/collection-aliasing.adoc   |    3 +-
 solr/solr-ref-guide/src/collection-management.adoc |    3 +-
 solr/solr-ref-guide/src/collections-api.adoc       |    2 +-
 .../solr-ref-guide/src/colocating-collections.adoc |    3 +-
 .../solr-ref-guide/src/command-line-utilities.adoc |    2 +-
 .../src/common-query-parameters.adoc               |   45 +-
 solr/solr-ref-guide/src/config-api.adoc            |   32 +
 solr/solr-ref-guide/src/configsets-api.adoc        |    2 +-
 solr/solr-ref-guide/src/coreadmin-api.adoc         |    2 +-
 solr/solr-ref-guide/src/css/customstyles.css       |  919 ----
 solr/solr-ref-guide/src/css/decoration.css         |  254 +
 solr/solr-ref-guide/src/css/font-awesome.min.css   |    4 -
 solr/solr-ref-guide/src/css/lavish-bootstrap.css   | 5423 --------------------
 solr/solr-ref-guide/src/css/navs.css               |  366 ++
 solr/solr-ref-guide/src/css/printstyles.css        |  160 -
 solr/solr-ref-guide/src/css/ref-guide.css          | 2893 +++++------
 solr/solr-ref-guide/src/css/search.css             |   47 +
 solr/solr-ref-guide/src/css/theme-solr.css         |  147 -
 solr/solr-ref-guide/src/distributed-requests.adoc  |    4 +
 solr/solr-ref-guide/src/filter-descriptions.adoc   |    6 +-
 .../glyphicons/glyphicons-halflings-regular.eot    |  Bin 20127 -> 0 bytes
 .../glyphicons/glyphicons-halflings-regular.svg    |  288 --
 .../glyphicons/glyphicons-halflings-regular.ttf    |  Bin 45404 -> 0 bytes
 .../glyphicons/glyphicons-halflings-regular.woff   |  Bin 23424 -> 0 bytes
 .../glyphicons/glyphicons-halflings-regular.woff2  |  Bin 18028 -> 0 bytes
 .../src/fonts/mplus1mn/mplus1mn-bold-ascii.ttf     |  Bin 15868 -> 0 bytes
 .../fonts/mplus1mn/mplus1mn-bold_italic-ascii.ttf  |  Bin 15908 -> 0 bytes
 .../src/fonts/mplus1mn/mplus1mn-italic-ascii.ttf   |  Bin 15928 -> 0 bytes
 .../mplus1mn/mplus1mn-regular-ascii-conums.ttf     |  Bin 20024 -> 0 bytes
 .../src/fonts/mplus1p-regular-fallback.ttf         |  Bin 1405716 -> 0 bytes
 solr/solr-ref-guide/src/function-queries.adoc      |    1 -
 solr/solr-ref-guide/src/how-to-contribute.adoc     |   16 +-
 .../src/implicit-requesthandlers.adoc              |    2 +
 solr/solr-ref-guide/src/index.adoc                 |   31 +-
 .../src/indexing-nested-documents.adoc             |    2 +-
 solr/solr-ref-guide/src/installing-solr.adoc       |    2 +-
 solr/solr-ref-guide/src/js/customscripts.js        |   68 +-
 solr/solr-ref-guide/src/js/ref-guide-toc.js        |   36 -
 solr/solr-ref-guide/src/js/toc.js                  |   82 -
 solr/solr-ref-guide/src/json-facet-api.adoc        |    5 +-
 solr/solr-ref-guide/src/json-query-dsl.adoc        |    2 +-
 .../src/jwt-authentication-plugin.adoc             |    3 +-
 solr/solr-ref-guide/src/language-analysis.adoc     |   20 +-
 solr/solr-ref-guide/src/learning-to-rank.adoc      |    2 +-
 .../src/major-changes-in-solr-7.adoc               |    1 -
 .../src/major-changes-in-solr-8.adoc               |    1 -
 .../src/major-changes-in-solr-9.adoc               |   18 +-
 solr/solr-ref-guide/src/meta-docs/jekyll.adoc      |   72 +-
 solr/solr-ref-guide/src/meta-docs/publish.adoc     |    6 +-
 solr/solr-ref-guide/src/metrics-reporting.adoc     |    2 +-
 .../src/overview-of-the-solr-admin-ui.adoc         |    2 +-
 .../src/package-manager-internals.adoc             |    1 -
 solr/solr-ref-guide/src/package-manager.adoc       |    1 -
 solr/solr-ref-guide/src/relevance.adoc             |    4 +-
 solr/solr-ref-guide/src/replica-management.adoc    |    3 +-
 solr/solr-ref-guide/src/result-clustering.adoc     |    4 +-
 .../src/rule-based-authorization-plugin.adoc       |   63 +-
 solr/solr-ref-guide/src/schema-api.adoc            |    1 -
 solr/solr-ref-guide/src/securing-solr.adoc         |    6 +-
 .../setting-up-an-external-zookeeper-ensemble.adoc |    2 +
 solr/solr-ref-guide/src/shard-management.adoc      |    3 +-
 .../src/shards-and-indexing-data-in-solrcloud.adoc |    2 +-
 .../src/solr-control-script-reference.adoc         |    1 -
 solr/solr-ref-guide/src/solr-glossary.adoc         |    2 +-
 solr/solr-ref-guide/src/solr-tutorial.adoc         |    3 +-
 solr/solr-ref-guide/src/solr-upgrade-notes.adoc    |   14 +-
 .../src/solrcloud-autoscaling-api.adoc             |    3 +-
 .../src/solrcloud-autoscaling-overview.adoc        |   24 +-
 .../solrcloud-autoscaling-policy-preferences.adoc  |    3 +-
 .../src/solrcloud-autoscaling-trigger-actions.adoc |   71 +-
 .../src/solrcloud-autoscaling-triggers.adoc        |   26 +-
 solr/solr-ref-guide/src/spatial-search.adoc        |    2 +-
 solr/solr-ref-guide/src/stream-api.adoc            |  216 +
 .../src/stream-decorator-reference.adoc            |    5 +-
 .../src/stream-evaluator-reference.adoc            |    3 +-
 .../src/stream-source-reference.adoc               |    3 +-
 solr/solr-ref-guide/src/streaming-expressions.adoc |    2 +-
 solr/solr-ref-guide/src/the-tagger-handler.adoc    |   11 +-
 solr/solr-ref-guide/src/tokenizers.adoc            |    2 +-
 ...ding-data-with-solr-cell-using-apache-tika.adoc |    1 -
 ...ta-store-data-with-the-data-import-handler.adoc |    2 +-
 solr/solrj/build.gradle                            |    2 +
 .../org/apache/solr/client/solrj/SolrClient.java   |    4 +-
 .../solr/client/solrj/cloud/SocketProxy.java       |   56 +-
 .../client/solrj/cloud/autoscaling/Clause.java     |    5 +-
 .../cloud/autoscaling/MoveReplicaSuggester.java    |    4 +-
 .../client/solrj/cloud/autoscaling/Policy.java     |  142 +-
 .../solrj/cloud/autoscaling/PolicyHelper.java      |   42 +-
 .../client/solrj/cloud/autoscaling/Suggester.java  |    4 +-
 .../client/solrj/cloud/autoscaling/Suggestion.java |    1 -
 .../cloud/autoscaling/UnsupportedSuggester.java    |    2 +-
 .../client/solrj/impl/BaseCloudSolrClient.java     |   18 +-
 .../solrj/impl/BaseHttpClusterStateProvider.java   |   15 +-
 .../solr/client/solrj/impl/BaseHttpSolrClient.java |    6 +-
 .../impl/ConcurrentUpdateHttp2SolrClient.java      |   22 +-
 .../solrj/impl/ConcurrentUpdateSolrClient.java     |   20 +-
 .../solr/client/solrj/impl/Http2SolrClient.java    |   71 +-
 .../solr/client/solrj/impl/HttpClientUtil.java     |   51 +-
 .../solr/client/solrj/impl/HttpSolrClient.java     |   73 +-
 .../client/solrj/impl/Krb5HttpClientBuilder.java   |   12 +-
 .../solr/client/solrj/impl/LBSolrClient.java       |    6 +-
 .../client/solrj/impl/SolrClientCloudManager.java  |   18 +-
 .../solrj/impl/SolrClientNodeStateProvider.java    |  129 +-
 .../solrj/impl/SolrHttpRequestRetryHandler.java    |    4 +-
 .../solr/client/solrj/impl/XMLResponseParser.java  |    6 +-
 .../solrj/impl/ZkClientClusterStateProvider.java   |    2 +-
 .../java/org/apache/solr/client/solrj/io/Lang.java |    4 +
 .../solr/client/solrj/io/SolrClientCache.java      |    2 +-
 .../solrj/io/eval/TimeDifferencingEvaluator.java   |   96 +-
 .../client/solrj/io/graph/GatherNodesStream.java   |    4 +-
 .../client/solrj/io/graph/ShortestPathStream.java  |    4 +-
 .../client/solrj/io/stream/CloudSolrStream.java    |    4 +-
 .../solr/client/solrj/io/stream/DaemonStream.java  |   16 +-
 .../client/solrj/io/stream/DeepRandomStream.java   |    4 +-
 .../client/solrj/io/stream/ExecutorStream.java     |    7 +-
 .../solr/client/solrj/io/stream/FacetStream.java   |   73 +-
 .../solrj/io/stream/FeaturesSelectionStream.java   |    4 +-
 .../solr/client/solrj/io/stream/FetchStream.java   |    1 +
 .../client/solrj/io/stream/ParallelListStream.java |    4 +-
 .../solrj/io/stream/SignificantTermsStream.java    |    4 +-
 .../solr/client/solrj/io/stream/StatsStream.java   |  284 +-
 .../client/solrj/io/stream/TextLogitStream.java    |    4 +-
 .../client/solrj/io/stream/TimeSeriesStream.java   |    8 +-
 .../solr/client/solrj/io/stream/TopicStream.java   |    5 +-
 .../solrj/io/stream/metrics/PercentileMetric.java  |   84 +
 .../client/solrj/io/stream/metrics/StdMetric.java  |   93 +
 .../RequestReplicaListTransformerGenerator.java    |    6 +-
 .../org/apache/solr/common/SolrDocumentList.java   |   10 +
 .../solr/common/cloud/ConnectionManager.java       |    6 +-
 .../solr/common/cloud/NodesSysPropsCacher.java     |    4 +-
 .../org/apache/solr/common/cloud/SolrZkClient.java |   39 +-
 .../apache/solr/common/cloud/ZkConfigManager.java  |    3 +-
 .../apache/solr/common/cloud/ZkDynamicConfig.java  |  144 +
 .../solr/common/cloud/ZkMaintenanceUtils.java      |    2 +-
 .../apache/solr/common/cloud/ZkStateReader.java    |   63 +-
 .../solr/common/params/CollectionParams.java       |    9 +
 .../apache/solr/common/params/CommonParams.java    |    6 +
 .../org/apache/solr/common/util/ExecutorUtil.java  |    2 +-
 .../solr/common/util/FastJavaBinDecoder.java       |    3 +
 .../org/apache/solr/common/util/JavaBinCodec.java  |   17 +-
 .../apache/solr/common/util/MapBackedCache.java    |    4 +
 .../org/apache/solr/common/util/NamedList.java     |   12 +-
 .../org/apache/solr/common/util/RetryUtil.java     |    4 +-
 .../solr/common/util/SolrNamedThreadFactory.java   |   52 +
 .../solr/common/util/SolrjNamedThreadFactory.java  |   49 -
 .../java/org/apache/solr/common/util/StrUtils.java |    2 +-
 .../org/apache/solr/common/util/TimeSource.java    |    4 +-
 .../java/org/apache/solr/common/util/Utils.java    |    4 +-
 .../apache/solr/common/util/XMLErrorLogger.java    |    4 +-
 solr/solrj/src/java/org/noggit/CharArr.java        |  262 +-
 solr/solrj/src/java/org/noggit/JSONParser.java     |    2 +-
 .../src/test-files/solrj/javabin_backcompat.bin    |  Bin 169 -> 170 bytes
 .../autoscaling/testSuggestionsRebalance2.json     |    3 +-
 .../ref_guide_examples/JsonRequestApiTest.java     |    2 +-
 .../org/apache/solr/client/solrj/GetByIdTest.java  |   27 +-
 .../solr/client/solrj/LargeVolumeTestBase.java     |   10 +-
 .../client/solrj/MergeIndexesExampleTestBase.java  |   12 +-
 .../apache/solr/client/solrj/SolrExampleTests.java |   10 +-
 .../solr/client/solrj/TestSolrJErrorHandling.java  |   20 +-
 .../client/solrj/cloud/autoscaling/TestPolicy.java |   45 +-
 .../client/solrj/impl/BasicHttpSolrClientTest.java |   47 +-
 .../solrj/impl/CloudHttp2SolrClientTest.java       |   32 +-
 .../client/solrj/impl/CloudSolrClientTest.java     |   32 +-
 .../impl/ConcurrentUpdateHttp2SolrClientTest.java  |    6 +-
 .../solrj/impl/ConcurrentUpdateSolrClientTest.java |    6 +-
 .../impl/Http2SolrClientCompatibilityTest.java     |    7 +-
 .../client/solrj/impl/Http2SolrClientTest.java     |   41 +-
 .../solr/client/solrj/impl/HttpClientUtilTest.java |   16 +-
 .../solrj/impl/HttpSolrClientConPoolTest.java      |    4 +-
 .../org/apache/solr/client/solrj/io/TestLang.java  |    2 +-
 .../solrj/io/stream/CloudAuthStreamTest.java       |    4 +-
 .../client/solrj/io/stream/MathExpressionTest.java |   60 +-
 .../solrj/io/stream/StreamDecoratorTest.java       |   17 +-
 .../solrj/io/stream/StreamExpressionTest.java      |  293 +-
 .../solr/client/solrj/request/SchemaTest.java      |   12 +-
 .../solr/client/solrj/request/TestV2Request.java   |    2 +-
 ...RequestReplicaListTransformerGeneratorTest.java |   17 +-
 .../apache/solr/common/cloud/SolrZkClientTest.java |   11 +-
 .../solr/common/params/CommonParamsTest.java       |    2 +
 .../apache/solr/common/util/TestJavaBinCodec.java  |   94 +-
 solr/test-framework/{README.txt => README.md}      |    0
 solr/test-framework/build.gradle                   |    3 +-
 solr/test-framework/build.xml                      |    2 +-
 .../apache/solr/BaseDistributedSearchTestCase.java |   11 +-
 .../java/org/apache/solr/SolrJettyTestBase.java    |    2 +-
 .../src/java/org/apache/solr/SolrTestCase.java     |    2 +-
 .../src/java/org/apache/solr/SolrTestCaseHS.java   |   68 +-
 .../src/java/org/apache/solr/SolrTestCaseJ4.java   |   67 +-
 .../solr/analysis/MockCharFilterFactory.java       |    5 +
 .../solr/analysis/MockTokenFilterFactory.java      |    5 +
 .../apache/solr/analysis/MockTokenizerFactory.java |    5 +
 .../solr/cloud/AbstractDistribZkTestBase.java      |   52 +-
 .../solr/cloud/AbstractFullDistribZkTestBase.java  |  133 +-
 .../java/org/apache/solr/cloud/ChaosMonkey.java    |   12 +-
 .../org/apache/solr/cloud/CloudInspectUtil.java    |    6 +-
 .../src/java/org/apache/solr/cloud/IpTables.java   |    4 +-
 .../apache/solr/cloud/MiniSolrCloudCluster.java    |   18 +-
 .../org/apache/solr/cloud/SolrCloudTestCase.java   |    6 +-
 .../apache/solr/cloud/StoppableCommitThread.java   |    4 +-
 .../apache/solr/cloud/StoppableSearchThread.java   |    2 +-
 .../java/org/apache/solr/cloud/ZkTestServer.java   |   20 +-
 .../solr/core/MockConcurrentMergeScheduler.java    |    3 +-
 .../org/apache/solr/util/RandomMergePolicy.java    |    4 +-
 .../java/org/apache/solr/util/RestTestBase.java    |   71 +-
 .../java/org/apache/solr/util/SSLTestConfig.java   |   26 +-
 .../src/java/org/apache/solr/util/TestHarness.java |   24 +-
 solr/webapp/build.gradle                           |    2 +
 solr/webapp/web/css/angular/index.css              |    3 +
 solr/webapp/web/index.html                         |    4 +-
 solr/webapp/web/js/angular/controllers/cloud.js    |    5 +-
 solr/webapp/web/js/angular/controllers/index.js    |    6 +
 solr/webapp/web/js/angular/controllers/login.js    |    2 +-
 solr/webapp/web/libs/angular-chosen.min.js         |    4 +-
 solr/webapp/web/partials/cloud.html                |    3 +-
 solr/webapp/web/partials/index.html                |   34 +
 solr/webapp/web/partials/login.html                |   17 +
 versions.lock                                      |  103 +-
 versions.props                                     |   21 +-
 1814 files changed, 49064 insertions(+), 42021 deletions(-)
 delete mode 100644 .github/workflows/gradle-wrapper-validation.yml
 create mode 100644 buildSrc/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java
 delete mode 100644 dev-tools/scripts/createPatch.py
 create mode 100644 dev-tools/scripts/create_line_file_docs.py
 delete mode 100755 dev-tools/scripts/prep-solr-ref-guide-rc.sh
 delete mode 100755 dev-tools/scripts/publish-solr-ref-guide.sh
 delete mode 100644 dev-tools/scripts/svnBranchToGit.py
 delete mode 100644 gradle/defaults-idea.gradle
 delete mode 100644 gradle/defaults-javadoc.gradle
 create mode 100644 gradle/documentation/changes-to-html.gradle
 create mode 100644 gradle/documentation/documentation.gradle
 create mode 100644 gradle/documentation/markdown.gradle
 create mode 100644 gradle/ide/intellij-idea.gradle
 create mode 100644 gradle/jar-manifest.gradle
 create mode 100644 gradle/render-javadoc.gradle
 create mode 100644 gradle/validation/check-broken-links.gradle
 create mode 100644 gradle/validation/validate-log-calls.gradle
 create mode 100644 gradle/wrapper/gradle-wrapper.jar.sha256
 create mode 100644 gradle/wrapper/gradle-wrapper.jar.version
 create mode 100644 help/validateLogCalls.txt
 create mode 100644 lucene/BUILD.md
 delete mode 100644 lucene/BUILD.txt
 create mode 100644 lucene/JRE_VERSION_MIGRATION.md
 delete mode 100644 lucene/JRE_VERSION_MIGRATION.txt
 create mode 100644 lucene/MIGRATE.md
 delete mode 100644 lucene/MIGRATE.txt
 create mode 100644 lucene/README.md
 delete mode 100644 lucene/README.txt
 create mode 100644 lucene/SYSTEM_REQUIREMENTS.md
 delete mode 100644 lucene/SYSTEM_REQUIREMENTS.txt
 create mode 100644 lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
 create mode 100644 lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/package-info.java
 create mode 100644 lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
 create mode 100644 lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/package-info.java
 create mode 100644 lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/Lucene70RWSegmentInfoFormat.java
 create mode 100644 lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
 create mode 100644 lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.0-cfs.zip
 create mode 100644 lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.0-nocfs.zip
 create mode 100644 lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.1-cfs.zip
 create mode 100644 lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.1-nocfs.zip
 create mode 100644 lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-cfs.zip
 create mode 100644 lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-nocfs.zip
 create mode 100644 lucene/backward-codecs/src/test/org/apache/lucene/index/sorted.8.5.0.zip
 create mode 100644 lucene/backward-codecs/src/test/org/apache/lucene/index/sorted.8.5.1.zip
 create mode 100644 lucene/backward-codecs/src/test/org/apache/lucene/index/sorted.8.5.2.zip
 create mode 100644 lucene/core/src/java/org/apache/lucene/codecs/CompoundDirectory.java
 delete mode 100644 lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
 delete mode 100644 lucene/core/src/java/org/apache/lucene/codecs/lucene70/package-info.java
 delete mode 100644 lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
 create mode 100644 lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86Codec.java
 create mode 100644 lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86SegmentInfoFormat.java
 create mode 100644 lucene/core/src/java/org/apache/lucene/codecs/lucene86/package-info.java
 create mode 100644 lucene/core/src/java/org/apache/lucene/index/DocValuesLeafReader.java
 create mode 100644 lucene/core/src/java/org/apache/lucene/index/IndexSorter.java
 create mode 100644 lucene/core/src/java/org/apache/lucene/index/SortFieldProvider.java
 create mode 100644 lucene/core/src/java/org/apache/lucene/search/FuzzyAutomatonBuilder.java
 create mode 100644 lucene/core/src/java/org/apache/lucene/search/QueueSizeBasedExecutor.java
 create mode 100644 lucene/core/src/java/org/apache/lucene/search/SliceExecutor.java
 create mode 100644 lucene/core/src/java/org/apache/lucene/util/ClassLoaderUtils.java
 delete mode 100644 lucene/core/src/java/org/apache/lucene/util/SPIClassIterator.java
 create mode 100644 lucene/core/src/java/org/apache/lucene/util/fst/BitTableUtil.java
 create mode 100644 lucene/core/src/resources/META-INF/services/org.apache.lucene.index.SortFieldProvider
 delete mode 100644 lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
 create mode 100644 lucene/core/src/test/org/apache/lucene/codecs/lucene86/TestLucene86SegmentInfoFormat.java
 create mode 100644 lucene/core/src/test/org/apache/lucene/index/TestBufferedUpdates.java
 delete mode 100644 lucene/core/src/test/org/apache/lucene/util/TestBitUtil.java
 create mode 100644 lucene/core/src/test/org/apache/lucene/util/TestClassLoaderUtils.java
 delete mode 100644 lucene/core/src/test/org/apache/lucene/util/TestSPIClassIterator.java
 create mode 100644 lucene/core/src/test/org/apache/lucene/util/fst/TestBitTableUtil.java
 create mode 100644 lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRange.java
 create mode 100644 lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRangeFactory.java
 create mode 100644 lucene/grouping/src/java/org/apache/lucene/search/grouping/DoubleRangeGroupSelector.java
 create mode 100644 lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRange.java
 create mode 100644 lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRangeFactory.java
 create mode 100644 lucene/grouping/src/java/org/apache/lucene/search/grouping/LongRangeGroupSelector.java
 create mode 100644 lucene/grouping/src/test/org/apache/lucene/search/grouping/BaseGroupSelectorTestCase.java
 create mode 100644 lucene/grouping/src/test/org/apache/lucene/search/grouping/BlockGroupingTest.java
 create mode 100644 lucene/grouping/src/test/org/apache/lucene/search/grouping/DoubleRangeGroupSelectorTest.java
 create mode 100644 lucene/grouping/src/test/org/apache/lucene/search/grouping/LongRangeGroupSelectorTest.java
 create mode 100644 lucene/grouping/src/test/org/apache/lucene/search/grouping/TermGroupSelectorTest.java
 create mode 100644 lucene/grouping/src/test/org/apache/lucene/search/grouping/TestDoubleRangeFactory.java
 create mode 100644 lucene/grouping/src/test/org/apache/lucene/search/grouping/TestLongRangeFactory.java
 create mode 100644 lucene/grouping/src/test/org/apache/lucene/search/grouping/ValueSourceGroupSelectorTest.java
 delete mode 100644 lucene/licenses/jetty-continuation-9.4.24.v20191120.jar.sha1
 create mode 100644 lucene/licenses/jetty-continuation-9.4.27.v20200227.jar.sha1
 delete mode 100644 lucene/licenses/jetty-http-9.4.24.v20191120.jar.sha1
 create mode 100644 lucene/licenses/jetty-http-9.4.27.v20200227.jar.sha1
 delete mode 100644 lucene/licenses/jetty-io-9.4.24.v20191120.jar.sha1
 create mode 100644 lucene/licenses/jetty-io-9.4.27.v20200227.jar.sha1
 delete mode 100644 lucene/licenses/jetty-server-9.4.24.v20191120.jar.sha1
 create mode 100644 lucene/licenses/jetty-server-9.4.27.v20200227.jar.sha1
 delete mode 100644 lucene/licenses/jetty-servlet-9.4.24.v20191120.jar.sha1
 create mode 100644 lucene/licenses/jetty-servlet-9.4.27.v20200227.jar.sha1
 delete mode 100644 lucene/licenses/jetty-util-9.4.24.v20191120.jar.sha1
 create mode 100644 lucene/licenses/jetty-util-9.4.27.v20200227.jar.sha1
 delete mode 100644 lucene/licenses/log4j-api-2.11.2.jar.sha1
 create mode 100644 lucene/licenses/log4j-api-2.13.2.jar.sha1
 delete mode 100644 lucene/licenses/log4j-core-2.11.2.jar.sha1
 create mode 100644 lucene/licenses/log4j-core-2.13.2.jar.sha1
 delete mode 100644 lucene/licenses/morfologik-ukrainian-search-3.9.0.jar.sha1
 create mode 100644 lucene/licenses/morfologik-ukrainian-search-4.9.1.jar.sha1
 create mode 100644 lucene/monitor/src/test/org/apache/lucene/monitor/TestDocumentBatch.java
 create mode 100644 lucene/site/xsl/index.template.md
 create mode 100644 lucene/test-framework/src/resources/org/apache/lucene/util/europarl.lines.txt.seek
 create mode 100644 solr/README.md
 delete mode 100644 solr/README.txt
 create mode 100644 solr/contrib/analysis-extras/README.md
 delete mode 100644 solr/contrib/analysis-extras/README.txt
 create mode 100644 solr/contrib/analytics/src/test-files/solr/analytics/legacy/facetWithDottedFields.txt
 create mode 100644 solr/contrib/analytics/src/test/org/apache/solr/analytics/util/OldAnalyticsRequestConverterUnitTest.java
 rename solr/contrib/clustering/{README.txt => README.md} (100%)
 create mode 100644 solr/contrib/dataimporthandler/README.md
 delete mode 100644 solr/contrib/dataimporthandler/README.txt
 create mode 100644 solr/contrib/extraction/README.md
 delete mode 100644 solr/contrib/extraction/README.txt
 create mode 100644 solr/contrib/jaegertracer-configurator/README.md
 delete mode 100644 solr/contrib/jaegertracer-configurator/README.txt
 create mode 100644 solr/contrib/langid/README.md
 delete mode 100644 solr/contrib/langid/README.txt
 delete mode 120000 solr/contrib/ltr/README.txt
 create mode 100644 solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestFeature.java
 delete mode 100644 solr/contrib/ltr/src/test/org/apache/solr/ltr/feature/TestOriginalScoreScorer.java
 create mode 100644 solr/contrib/prometheus-exporter/README.md
 delete mode 100644 solr/contrib/prometheus-exporter/README.txt
 delete mode 100644 solr/core/src/java/org/apache/solr/cloud/CurrentCoreDescriptorProvider.java
 create mode 100644 solr/core/src/java/org/apache/solr/cloud/OverseerElectionContext.java
 create mode 100644 solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContext.java
 create mode 100644 solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContextBase.java
 create mode 100644 solr/core/src/java/org/apache/solr/core/SolrPaths.java
 create mode 100644 solr/core/src/java/org/apache/solr/handler/admin/ZookeeperReadAPI.java
 create mode 100644 solr/core/src/java/org/apache/solr/handler/component/StatsInfo.java
 delete mode 100644 solr/core/src/java/org/apache/solr/handler/export/DoubleCmp.java
 create mode 100644 solr/core/src/java/org/apache/solr/handler/export/DoubleComp.java
 delete mode 100644 solr/core/src/java/org/apache/solr/handler/export/FloatCmp.java
 create mode 100644 solr/core/src/java/org/apache/solr/handler/export/FloatComp.java
 delete mode 100644 solr/core/src/java/org/apache/solr/handler/export/LongCmp.java
 create mode 100644 solr/core/src/java/org/apache/solr/handler/export/LongComp.java
 delete mode 100644 solr/core/src/java/org/apache/solr/search/ScoreFilter.java
 delete mode 100644 solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java
 create mode 100644 solr/core/src/java/org/apache/solr/search/facet/FacetContext.java
 create mode 100644 solr/core/src/java/org/apache/solr/search/facet/FacetParser.java
 create mode 100644 solr/core/src/java/org/apache/solr/search/facet/FacetRangeParser.java
 create mode 100644 solr/core/src/java/org/apache/solr/search/facet/FacetRangeProcessor.java
 create mode 100644 solr/core/src/java/org/apache/solr/search/facet/FacetRequestSorted.java
 create mode 100644 solr/core/src/java/org/apache/solr/search/join/GraphEdgeCollector.java
 delete mode 100644 solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java
 create mode 100644 solr/core/src/java/org/apache/solr/security/CertAuthPlugin.java
 create mode 100644 solr/core/src/java/org/apache/solr/security/ExternalRoleRuleBasedAuthorizationPlugin.java
 create mode 100644 solr/core/src/java/org/apache/solr/security/RuleBasedAuthorizationPluginBase.java
 delete mode 100644 solr/core/src/java/org/apache/solr/util/DefaultSolrThreadFactory.java
 create mode 100644 solr/core/src/java/org/apache/solr/util/DynamicMap.java
 create mode 100644 solr/core/src/java/org/apache/solr/util/FloatConsumer.java
 create mode 100644 solr/core/src/java/org/apache/solr/util/IntFloatDynamicMap.java
 create mode 100644 solr/core/src/java/org/apache/solr/util/IntIntDynamicMap.java
 create mode 100644 solr/core/src/java/org/apache/solr/util/IntLongDynamicMap.java
 delete mode 100644 solr/core/src/java/org/apache/solr/util/SolrFileCleaningTracker.java
 delete mode 100644 solr/core/src/test/org/apache/solr/cloud/HealthCheckHandlerTest.java
 create mode 100644 solr/core/src/test/org/apache/solr/common/cloud/ZkDynamicConfigTest.java
 create mode 100644 solr/core/src/test/org/apache/solr/handler/admin/HealthCheckHandlerTest.java
 create mode 100644 solr/core/src/test/org/apache/solr/handler/admin/ZookeeperReadAPITest.java
 create mode 100644 solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java
 create mode 100644 solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKGEquiv.java
 create mode 100644 solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetErrors.java
 create mode 100644 solr/core/src/test/org/apache/solr/search/facet/TestJsonRangeFacets.java
 create mode 100644 solr/core/src/test/org/apache/solr/security/BaseTestRuleBasedAuthorizationPlugin.java
 create mode 100644 solr/core/src/test/org/apache/solr/security/CertAuthPluginTest.java
 create mode 100644 solr/core/src/test/org/apache/solr/security/PrincipalWithUserRoles.java
 create mode 100644 solr/core/src/test/org/apache/solr/security/TestExternalRoleRuleBasedAuthorizationPlugin.java
 delete mode 100644 solr/core/src/test/org/apache/solr/security/TestRuleBasedAuthorizationPlugin.java
 create mode 100644 solr/core/src/test/org/apache/solr/util/DynamicMapsTest.java
 create mode 100644 solr/example/README.md
 delete mode 100644 solr/example/README.txt
 create mode 100644 solr/example/example-DIH/README.md
 delete mode 100644 solr/example/example-DIH/README.txt
 create mode 100644 solr/example/files/README.md
 delete mode 100644 solr/example/files/README.txt
 create mode 100644 solr/example/films/README.md
 delete mode 100644 solr/example/films/README.txt
 delete mode 100644 solr/licenses/caffeine-2.8.0.jar.sha1
 create mode 100644 solr/licenses/caffeine-2.8.4.jar.sha1
 delete mode 100644 solr/licenses/commons-cli-1.2.jar.sha1
 create mode 100644 solr/licenses/commons-cli-1.4.jar.sha1
 delete mode 100644 solr/licenses/commons-fileupload-1.3.3.jar.sha1
 delete mode 100644 solr/licenses/commons-fileupload-LICENSE-ASL.txt
 delete mode 100644 solr/licenses/commons-fileupload-NOTICE.txt
 delete mode 100644 solr/licenses/http2-client-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/http2-client-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/http2-common-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/http2-common-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/http2-hpack-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/http2-hpack-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/http2-http-client-transport-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/http2-http-client-transport-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/http2-server-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/http2-server-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-alpn-client-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-alpn-client-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-alpn-java-client-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-alpn-java-client-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-alpn-java-server-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-alpn-java-server-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-alpn-server-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-alpn-server-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-client-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-client-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-continuation-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-continuation-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-deploy-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-deploy-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-http-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-http-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-io-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-io-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-jmx-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-jmx-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-rewrite-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-rewrite-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-security-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-security-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-server-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-server-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-servlet-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-servlet-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-servlets-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-servlets-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-start-9.4.24.v20191120-shaded.jar.sha1
 create mode 100644 solr/licenses/jetty-start-9.4.27.v20200227-shaded.jar.sha1
 delete mode 100644 solr/licenses/jetty-util-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-util-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-webapp-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-webapp-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/jetty-xml-9.4.24.v20191120.jar.sha1
 create mode 100644 solr/licenses/jetty-xml-9.4.27.v20200227.jar.sha1
 delete mode 100644 solr/licenses/log4j-1.2-api-2.11.2.jar.sha1
 create mode 100644 solr/licenses/log4j-1.2-api-2.13.2.jar.sha1
 delete mode 100644 solr/licenses/log4j-api-2.11.2.jar.sha1
 create mode 100644 solr/licenses/log4j-api-2.13.2.jar.sha1
 delete mode 100644 solr/licenses/log4j-core-2.11.2.jar.sha1
 create mode 100644 solr/licenses/log4j-core-2.13.2.jar.sha1
 delete mode 100644 solr/licenses/log4j-slf4j-impl-2.11.2.jar.sha1
 create mode 100644 solr/licenses/log4j-slf4j-impl-2.13.2.jar.sha1
 delete mode 100644 solr/licenses/log4j-web-2.11.2.jar.sha1
 create mode 100644 solr/licenses/log4j-web-2.13.2.jar.sha1
 delete mode 100644 solr/licenses/metrics-core-4.1.2.jar.sha1
 create mode 100644 solr/licenses/metrics-core-4.1.5.jar.sha1
 delete mode 100644 solr/licenses/metrics-graphite-4.1.2.jar.sha1
 create mode 100644 solr/licenses/metrics-graphite-4.1.5.jar.sha1
 delete mode 100644 solr/licenses/metrics-jetty9-4.1.2.jar.sha1
 create mode 100644 solr/licenses/metrics-jetty9-4.1.5.jar.sha1
 delete mode 100644 solr/licenses/metrics-jmx-4.1.2.jar.sha1
 create mode 100644 solr/licenses/metrics-jmx-4.1.5.jar.sha1
 delete mode 100644 solr/licenses/metrics-jvm-4.1.2.jar.sha1
 create mode 100644 solr/licenses/metrics-jvm-4.1.5.jar.sha1
 delete mode 100644 solr/licenses/morfologik-ukrainian-search-3.9.0.jar.sha1
 create mode 100644 solr/licenses/morfologik-ukrainian-search-4.9.1.jar.sha1
 delete mode 100644 solr/licenses/netty-buffer-4.1.29.Final.jar.sha1
 create mode 100644 solr/licenses/netty-buffer-4.1.47.Final.jar.sha1
 delete mode 100644 solr/licenses/netty-codec-4.1.29.Final.jar.sha1
 create mode 100644 solr/licenses/netty-codec-4.1.47.Final.jar.sha1
 delete mode 100644 solr/licenses/netty-common-4.1.29.Final.jar.sha1
 create mode 100644 solr/licenses/netty-common-4.1.47.Final.jar.sha1
 delete mode 100644 solr/licenses/netty-handler-4.1.29.Final.jar.sha1
 create mode 100644 solr/licenses/netty-handler-4.1.47.Final.jar.sha1
 delete mode 100644 solr/licenses/netty-resolver-4.1.29.Final.jar.sha1
 create mode 100644 solr/licenses/netty-resolver-4.1.47.Final.jar.sha1
 delete mode 100644 solr/licenses/netty-transport-4.1.29.Final.jar.sha1
 create mode 100644 solr/licenses/netty-transport-4.1.47.Final.jar.sha1
 delete mode 100644 solr/licenses/netty-transport-native-epoll-4.1.29.Final.jar.sha1
 create mode 100644 solr/licenses/netty-transport-native-epoll-4.1.47.Final.jar.sha1
 delete mode 100644 solr/licenses/netty-transport-native-unix-common-4.1.29.Final.jar.sha1
 create mode 100644 solr/licenses/netty-transport-native-unix-common-4.1.47.Final.jar.sha1
 delete mode 100644 solr/licenses/tika-core-1.23.jar.sha1
 create mode 100644 solr/licenses/tika-core-1.24.jar.sha1
 delete mode 100644 solr/licenses/tika-java7-1.23.jar.sha1
 create mode 100644 solr/licenses/tika-java7-1.24.jar.sha1
 delete mode 100644 solr/licenses/tika-parsers-1.23.jar.sha1
 create mode 100644 solr/licenses/tika-parsers-1.24.jar.sha1
 delete mode 100644 solr/licenses/tika-xmp-1.23.jar.sha1
 create mode 100644 solr/licenses/tika-xmp-1.24.jar.sha1
 delete mode 100644 solr/licenses/zookeeper-3.5.5.jar.sha1
 create mode 100644 solr/licenses/zookeeper-3.5.7.jar.sha1
 delete mode 100644 solr/licenses/zookeeper-jute-3.5.5.jar.sha1
 create mode 100644 solr/licenses/zookeeper-jute-3.5.7.jar.sha1
 create mode 100644 solr/server/README.md
 delete mode 100644 solr/server/README.txt
 create mode 100644 solr/server/solr/README.md
 delete mode 100644 solr/server/solr/README.txt
 rename solr/server/solr/configsets/sample_techproducts_configs/conf/clustering/carrot2/{README.txt => README.md} (100%)
 create mode 100644 solr/server/solr/configsets/sample_techproducts_configs/conf/velocity/README.md
 delete mode 100644 solr/server/solr/configsets/sample_techproducts_configs/conf/velocity/README.txt
 rename solr/site/{SYSTEM_REQUIREMENTS.mdtext => SYSTEM_REQUIREMENTS.md} (100%)
 create mode 100644 solr/site/index.template.md
 create mode 100644 solr/site/online-link.template.md
 delete mode 100755 solr/solr-ref-guide/src/_includes/head_print.html
 delete mode 100755 solr/solr-ref-guide/src/_includes/taglogic.html
 delete mode 100755 solr/solr-ref-guide/src/_includes/toc.html
 delete mode 100755 solr/solr-ref-guide/src/_layouts/default_print.html
 delete mode 100755 solr/solr-ref-guide/src/_layouts/page_print.html
 create mode 100644 solr/solr-ref-guide/src/cert-authentication-plugin.adoc
 delete mode 100755 solr/solr-ref-guide/src/css/customstyles.css
 create mode 100644 solr/solr-ref-guide/src/css/decoration.css
 delete mode 100755 solr/solr-ref-guide/src/css/font-awesome.min.css
 delete mode 100755 solr/solr-ref-guide/src/css/lavish-bootstrap.css
 create mode 100644 solr/solr-ref-guide/src/css/navs.css
 delete mode 100755 solr/solr-ref-guide/src/css/printstyles.css
 create mode 100644 solr/solr-ref-guide/src/css/search.css
 delete mode 100644 solr/solr-ref-guide/src/css/theme-solr.css
 delete mode 100755 solr/solr-ref-guide/src/fonts/glyphicons/glyphicons-halflings-regular.eot
 delete mode 100755 solr/solr-ref-guide/src/fonts/glyphicons/glyphicons-halflings-regular.svg
 delete mode 100755 solr/solr-ref-guide/src/fonts/glyphicons/glyphicons-halflings-regular.ttf
 delete mode 100755 solr/solr-ref-guide/src/fonts/glyphicons/glyphicons-halflings-regular.woff
 delete mode 100755 solr/solr-ref-guide/src/fonts/glyphicons/glyphicons-halflings-regular.woff2
 delete mode 100644 solr/solr-ref-guide/src/fonts/mplus1mn/mplus1mn-bold-ascii.ttf
 delete mode 100644 solr/solr-ref-guide/src/fonts/mplus1mn/mplus1mn-bold_italic-ascii.ttf
 delete mode 100644 solr/solr-ref-guide/src/fonts/mplus1mn/mplus1mn-italic-ascii.ttf
 delete mode 100644 solr/solr-ref-guide/src/fonts/mplus1mn/mplus1mn-regular-ascii-conums.ttf
 delete mode 100644 solr/solr-ref-guide/src/fonts/mplus1p-regular-fallback.ttf
 delete mode 100644 solr/solr-ref-guide/src/js/ref-guide-toc.js
 delete mode 100755 solr/solr-ref-guide/src/js/toc.js
 create mode 100644 solr/solr-ref-guide/src/stream-api.adoc
 create mode 100644 solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/PercentileMetric.java
 create mode 100644 solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/StdMetric.java
 create mode 100644 solr/solrj/src/java/org/apache/solr/common/cloud/ZkDynamicConfig.java
 create mode 100644 solr/solrj/src/java/org/apache/solr/common/util/SolrNamedThreadFactory.java
 delete mode 100644 solr/solrj/src/java/org/apache/solr/common/util/SolrjNamedThreadFactory.java
 rename solr/test-framework/{README.txt => README.md} (100%)


[lucene-solr] 34/47: SOLR-14511: Documented node.sysprop shard preference (#1536)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit d7762e9fede9ce11bc441718c2d9dfae30da6e83
Author: Radu Gheorghe <ra...@sematext.com>
AuthorDate: Thu May 28 19:41:44 2020 +0300

    SOLR-14511: Documented node.sysprop shard preference (#1536)
    
    * Documented node.sysprop shard preference
    
    For https://issues.apache.org/jira/browse/SOLR-13445
    
    * Added defaultShardPreferences to CLUSTERPROP doc
    
    As implemented via https://issues.apache.org/jira/browse/SOLR-13445
---
 solr/solr-ref-guide/src/cluster-node-management.adoc | 19 ++++++++++++++++++-
 solr/solr-ref-guide/src/distributed-requests.adoc    |  4 ++++
 2 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/solr/solr-ref-guide/src/cluster-node-management.adoc b/solr/solr-ref-guide/src/cluster-node-management.adoc
index 571666f..e477802 100644
--- a/solr/solr-ref-guide/src/cluster-node-management.adoc
+++ b/solr/solr-ref-guide/src/cluster-node-management.adoc
@@ -131,7 +131,7 @@ Add, edit or delete a cluster-wide property.
 === CLUSTERPROP Parameters
 
 `name`::
-The name of the property. Supported property names are `autoAddReplicas`, `legacyCloud`, `location`, `maxCoresPerNode` and `urlScheme`. Other properties can be set
+The name of the property. Supported property names are `autoAddReplicas`, `legacyCloud`, `location`, `maxCoresPerNode`, `urlScheme` and `defaultShardPreferences`. Other properties can be set
 (for example, if you need them for custom plugins) but they must begin with the prefix `ext.`. Unknown properties that don't begin with `ext.` will be rejected.
 
 `val`::
@@ -213,6 +213,23 @@ replaced with `defaults`. Using the `collectionDefaults` parameter in Solr 7.4 o
  but the format of the properties will automatically be converted to the new nested structure.
 Support for the "collectionDefaults" key will be removed in Solr 9.
 
+=== Default Shard Preferences
+
+Using the `defaultShardPreferences` parameter, you can implement rack or availability zone awareness. First, make sure to "label" your nodes using a <<configuring-solrconfig-xml.adoc#jvm-system-properties,system property>> (e.g. `-Drack=rack1`). Then, set the value of `defaultShardPreferences` to `node.sysprop:sysprop.YOUR_PROPERTY_NAME` like this:
+
+[source,bash]
+----
+curl -X POST -H 'Content-type:application/json' --data-binary '
+{
+  "set-property" : {
+    "name" : "defaultShardPreferences",
+    "val" : "node.sysprop:sysprop.rack"
+  }
+}' http://localhost:8983/api/cluster
+----
+
+At this point, if you run a query on a node that has, for example, `rack=rack1`, Solr will try to hit only replicas from `rack1`.
+
 [[balanceshardunique]]
 == BALANCESHARDUNIQUE: Balance a Property Across Nodes
 
diff --git a/solr/solr-ref-guide/src/distributed-requests.adoc b/solr/solr-ref-guide/src/distributed-requests.adoc
index 7b9a109..191bb85 100644
--- a/solr/solr-ref-guide/src/distributed-requests.adoc
+++ b/solr/solr-ref-guide/src/distributed-requests.adoc
@@ -189,6 +189,10 @@ Applied after sorting by inherent replica attributes, this property defines a fa
 +
 `stable[:hash[:_paramName_]]` the string value associated with the given parameter name is hashed to a dividend that is used to determine replica preference order (analogous to the explicit `dividend` property above); `_paramName_` defaults to `q` if not specified, providing stable routing keyed to the string value of the "main query". Note that this may be inappropriate for some use cases (e.g., static main queries that leverage parameter substitution)
 
+`node.sysprop`::
+Queries will be routed to nodes that define the same value for the given system property as the node serving the request. For example, if you start Solr nodes on different racks, you'll want to identify those nodes by a <<configuring-solrconfig-xml.adoc#jvm-system-properties,system property>> (e.g. `-Drack=rack1`). Then, queries can contain `shards.preference=node.sysprop:sysprop.rack` to make sure you always hit shards with the same value of `rack`.
+
+
 Examples:
 
 * Prefer stable routing (keyed to client "sessionId" param) among otherwise equivalent replicas:
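
As an illustration (not part of the committed patch): once the cluster-wide `defaultShardPreferences` property is set as documented above, the same preference can also be requested per query. A minimal sketch, assuming nodes started with `-Drack=rack1` and a collection named `techproducts` (both names are assumptions for this example):

[source,bash]
----
# Prefer replicas hosted on nodes that share this node's "rack" system property value.
# "techproducts" is a placeholder collection name.
curl "http://localhost:8983/solr/techproducts/select?q=*:*&shards.preference=node.sysprop:sysprop.rack"
----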


[lucene-solr] 22/47: SOLR-14498: Upgrade to Caffeine 2.8.4, which fixes the cache poisoning issue.

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 3731d71df62df91643b93980fc592ede23fe786f
Author: Andrzej Bialecki <ab...@apache.org>
AuthorDate: Tue May 26 12:56:08 2020 +0200

    SOLR-14498: Upgrade to Caffeine 2.8.4, which fixes the cache poisoning issue.
---
 lucene/ivy-versions.properties        | 2 +-
 solr/CHANGES.txt                      | 2 ++
 solr/licenses/caffeine-2.8.0.jar.sha1 | 1 -
 solr/licenses/caffeine-2.8.4.jar.sha1 | 1 +
 4 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index 3fccbfb..ed4f53a 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -23,7 +23,7 @@ com.fasterxml.jackson.core.version = 2.10.1
 /com.fasterxml.jackson.core/jackson-databind = ${com.fasterxml.jackson.core.version}
 /com.fasterxml.jackson.dataformat/jackson-dataformat-smile = ${com.fasterxml.jackson.core.version}
 
-/com.github.ben-manes.caffeine/caffeine = 2.8.0
+/com.github.ben-manes.caffeine/caffeine = 2.8.4
 /com.github.virtuald/curvesapi = 1.06
 
 /com.github.zafarkhaja/java-semver = 0.9.0
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 62f3a9c..a5cf874 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -201,6 +201,8 @@ Bug Fixes
 * SOLR-14504: ZkController LiveNodesListener has NullPointerException in startup race.
   (Colvin Cowie via ab)
 
+* SOLR-14498: Upgrade to Caffeine 2.8.4, which fixes the cache poisoning issue. (Jakub Zytka, ab)
+
 Other Changes
 ---------------------
 * SOLR-14197: SolrResourceLoader: marked many methods as deprecated, and in some cases rerouted exiting logic to avoid
diff --git a/solr/licenses/caffeine-2.8.0.jar.sha1 b/solr/licenses/caffeine-2.8.0.jar.sha1
deleted file mode 100644
index ce291c4..0000000
--- a/solr/licenses/caffeine-2.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6000774d7f8412ced005a704188ced78beeed2bb
diff --git a/solr/licenses/caffeine-2.8.4.jar.sha1 b/solr/licenses/caffeine-2.8.4.jar.sha1
new file mode 100644
index 0000000..813e00d
--- /dev/null
+++ b/solr/licenses/caffeine-2.8.4.jar.sha1
@@ -0,0 +1 @@
+e5730b11981406faa28e0912405a0ce7c2d0f377


[lucene-solr] 20/47: SOLR-14443: Make SolrLogPostTool resilient to odd requests (#1525)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 8e475cc3819db28a24bea9b2667b4db9f00fba12
Author: Jason Gerlowski <ge...@apache.org>
AuthorDate: Fri May 22 10:08:26 2020 -0400

    SOLR-14443: Make SolrLogPostTool resilient to odd requests (#1525)
---
 .../java/org/apache/solr/util/SolrLogPostTool.java | 182 +++++++++++----------
 .../org/apache/solr/util/SolrLogPostToolTest.java  |  18 ++
 2 files changed, 114 insertions(+), 86 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java b/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java
index a1b67dd..4e20f2c 100644
--- a/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java
+++ b/solr/core/src/java/org/apache/solr/util/SolrLogPostTool.java
@@ -27,6 +27,8 @@ import java.util.TreeMap;
 import java.util.UUID;
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
+
+import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -96,13 +98,11 @@ public class SolrLogPostTool {
 
             rec++;
             UUID id = UUID.randomUUID();
-            doc.addField("id", id.toString());
-            doc.addField("file_s", fileName);
+            doc.setField("id", id.toString());
+            doc.setField("file_s", fileName);
             request.add(doc);
             if (rec == 300) {
-              CLIO.out("Sending batch of 300 log records...");
-              request.process(client);
-              CLIO.out("Batch sent");
+              sendBatch(client, request, false /* normal batch */);
               request = new UpdateRequest();
               rec = 0;
             }
@@ -113,17 +113,35 @@ public class SolrLogPostTool {
       }
 
       if (rec > 0) {
-        //Process last batch
-        CLIO.out("Sending last batch ...");
-        request.process(client);
-        client.commit();
-        CLIO.out("Committed");
+        sendBatch(client, request, true /* last batch */);
       }
     } finally {
       client.close();
     }
   }
 
+  private static void sendBatch(SolrClient client, UpdateRequest request, boolean lastRequest) throws SolrServerException, IOException {
+    final String beginMessage = lastRequest ? "Sending last batch ..." : "Sending batch of 300 log records...";
+    CLIO.out(beginMessage);
+    try {
+      request.process(client);
+      CLIO.out("Batch sent");
+    } catch (Exception e) {
+      CLIO.err("Batch sending failed: " + e.getMessage());
+      e.printStackTrace(CLIO.getErrStream());
+    }
+
+    if (lastRequest) {
+      try {
+        client.commit();
+        CLIO.out("Committed");
+      } catch (Exception e) {
+        CLIO.err("Unable to commit documents: " + e.getMessage());
+        e.printStackTrace(CLIO.getErrStream());
+      }
+    }
+  }
+
   static void gatherFiles(File rootFile, List<File> files) {
 
     if(rootFile.isFile()) {
@@ -228,50 +246,48 @@ public class SolrLogPostTool {
       return null;
     }
 
+    private void setFieldIfUnset(SolrInputDocument doc, String fieldName, String fieldValue) {
+      if (doc.containsKey(fieldName)) return;
+
+      doc.setField(fieldName, fieldValue);
+    }
+
     private SolrInputDocument parseError(String line, String trace) throws IOException {
       SolrInputDocument doc = new SolrInputDocument();
-      doc.addField("date_dt", parseDate(line));
-      doc.addField("type_s", "error");
-      doc.addField("line_t", line);
+      doc.setField("date_dt", parseDate(line));
+      doc.setField("type_s", "error");
+      doc.setField("line_t", line);
 
       //Don't include traces that have only the %html header.
       if(trace != null && trace.length() > 6) {
-        doc.addField("stack_t", trace);
+        doc.setField("stack_t", trace);
       }
 
       if(this.cause != null) {
-        doc.addField("root_cause_t", cause.replace("Caused by:", "").trim());
+        doc.setField("root_cause_t", cause.replace("Caused by:", "").trim());
       }
 
-      doc.addField("collection_s", parseCollection(line));
-      doc.addField("core_s", parseCore(line));
-      doc.addField("shard_s", parseShard(line));
-      doc.addField("replica_s", parseReplica(line));
+      doc.setField("collection_s", parseCollection(line));
+      doc.setField("core_s", parseCore(line));
+      doc.setField("shard_s", parseShard(line));
+      doc.setField("replica_s", parseReplica(line));
 
       return doc;
     }
 
     private SolrInputDocument parseCommit(String line) throws IOException {
       SolrInputDocument doc = new SolrInputDocument();
-      doc.addField("date_dt", parseDate(line));
-      doc.addField("type_s", "commit");
-      doc.addField("line_t", line);
-      if(line.contains("softCommit=true")) {
-        doc.addField("soft_commit_s", "true");
-      } else {
-        doc.addField("soft_commit_s", "false");
-      }
+      doc.setField("date_dt", parseDate(line));
+      doc.setField("type_s", "commit");
+      doc.setField("line_t", line);
+      doc.setField("soft_commit_s", Boolean.toString(line.contains("softCommit=true")));
 
-      if(line.contains("openSearcher=true")) {
-        doc.addField("open_searcher_s", "true");
-      } else {
-        doc.addField("open_searcher_s", "false");
-      }
+      doc.setField("open_searcher_s", Boolean.toString(line.contains("openSearcher=true")));
 
-      doc.addField("collection_s", parseCollection(line));
-      doc.addField("core_s", parseCore(line));
-      doc.addField("shard_s", parseShard(line));
-      doc.addField("replica_s", parseReplica(line));
+      doc.setField("collection_s", parseCollection(line));
+      doc.setField("core_s", parseCore(line));
+      doc.setField("shard_s", parseShard(line));
+      doc.setField("replica_s", parseReplica(line));
 
       return doc;
     }
@@ -279,36 +295,36 @@ public class SolrLogPostTool {
     private SolrInputDocument parseQueryRecord(String line) {
 
       SolrInputDocument doc = new SolrInputDocument();
-      doc.addField("date_dt", parseDate(line));
-      doc.addField("qtime_i", parseQTime(line));
-      doc.addField("status_s", parseStatus(line));
+      doc.setField("date_dt", parseDate(line));
+      doc.setField("qtime_i", parseQTime(line));
+      doc.setField("status_s", parseStatus(line));
 
       String path = parsePath(line);
-      doc.addField("path_s", path);
+      doc.setField("path_s", path);
 
       if(line.contains("hits=")) {
-        doc.addField("hits_l", parseHits(line));
+        doc.setField("hits_l", parseHits(line));
       }
 
       String params = parseParams(line);
-      doc.addField("params_t", params);
+      doc.setField("params_t", params);
       addParams(doc, params);
 
-      doc.addField("collection_s", parseCollection(line));
-      doc.addField("core_s", parseCore(line));
-      doc.addField("node_s", parseNode(line));
-      doc.addField("shard_s", parseShard(line));
-      doc.addField("replica_s", parseReplica(line));
+      doc.setField("collection_s", parseCollection(line));
+      doc.setField("core_s", parseCore(line));
+      doc.setField("node_s", parseNode(line));
+      doc.setField("shard_s", parseShard(line));
+      doc.setField("replica_s", parseReplica(line));
 
 
       if(path != null && path.contains("/admin")) {
-        doc.addField("type_s", "admin");
+        doc.setField("type_s", "admin");
       } else if(path != null && params.contains("/replication")) {
-        doc.addField("type_s", "replication");
+        doc.setField("type_s", "replication");
       } else if (path != null && path.contains("/get")) {
-        doc.addField("type_s", "get");
+        doc.setField("type_s", "get");
       } else {
-        doc.addField("type_s", "query");
+        doc.setField("type_s", "query");
       }
 
       return doc;
@@ -318,10 +334,10 @@ public class SolrLogPostTool {
     private SolrInputDocument parseNewSearch(String line) {
 
       SolrInputDocument doc = new SolrInputDocument();
-      doc.addField("date_dt", parseDate(line));
-      doc.addField("core_s", parseNewSearcherCore(line));
-      doc.addField("type_s", "newSearcher");
-      doc.addField("line_t", line);
+      doc.setField("date_dt", parseDate(line));
+      doc.setField("core_s", parseNewSearcherCore(line));
+      doc.setField("type_s", "newSearcher");
+      doc.setField("line_t", line);
 
       return doc;
     }
@@ -338,21 +354,21 @@ public class SolrLogPostTool {
 
     private SolrInputDocument parseUpdate(String line) {
       SolrInputDocument doc = new SolrInputDocument();
-      doc.addField("date_dt", parseDate(line));
+      doc.setField("date_dt", parseDate(line));
 
       if(line.contains("deleteByQuery=")) {
-        doc.addField("type_s", "deleteByQuery");
+        doc.setField("type_s", "deleteByQuery");
       } else if(line.contains("delete=")) {
-        doc.addField("type_s", "delete");
+        doc.setField("type_s", "delete");
       } else {
-        doc.addField("type_s", "update");
+        doc.setField("type_s", "update");
       }
 
-      doc.addField("collection_s", parseCollection(line));
-      doc.addField("core_s", parseCore(line));
-      doc.addField("shard_s", parseShard(line));
-      doc.addField("replica_s", parseReplica(line));
-      doc.addField("line_t", line);
+      doc.setField("collection_s", parseCollection(line));
+      doc.setField("core_s", parseCore(line));
+      doc.setField("shard_s", parseShard(line));
+      doc.setField("replica_s", parseReplica(line));
+      doc.setField("line_t", line);
 
       return doc;
     }
@@ -474,47 +490,51 @@ public class SolrLogPostTool {
       return builder.toString();
     }
 
+    private void addOrReplaceFieldValue(SolrInputDocument doc, String fieldName, String fieldValue) {
+      doc.setField(fieldName, fieldValue);
+    }
+
     private void addParams(SolrInputDocument doc,  String params) {
       String[] pairs = params.split("&");
       for(String pair : pairs) {
         String[] parts = pair.split("=");
         if(parts.length == 2 && parts[0].equals("q")) {
           String dq = URLDecoder.decode(parts[1], Charset.defaultCharset());
-          doc.addField("q_s", dq);
-          doc.addField("q_t", dq);
+          setFieldIfUnset(doc, "q_s", dq);
+          setFieldIfUnset(doc, "q_t", dq);
         }
 
         if(parts[0].equals("rows")) {
           String dr = URLDecoder.decode(parts[1], Charset.defaultCharset());
-          doc.addField("rows_i", dr);
+          setFieldIfUnset(doc, "rows_i", dr);
         }
 
         if(parts[0].equals("distrib")) {
           String dr = URLDecoder.decode(parts[1], Charset.defaultCharset());
-          doc.addField("distrib_s", dr);
+          setFieldIfUnset(doc, "distrib_s", dr);
         }
 
         if(parts[0].equals("shards")) {
-          doc.addField("shards_s", "true");
+          setFieldIfUnset(doc, "shards_s", "true");
         }
 
         if(parts[0].equals("ids") && !isRTGRequest(doc)) {
-          doc.addField("ids_s", "true");
+          setFieldIfUnset(doc, "ids_s", "true");
         }
 
         if(parts[0].equals("isShard")) {
           String dr = URLDecoder.decode(parts[1], Charset.defaultCharset());
-          doc.addField("isShard_s", dr);
+          setFieldIfUnset(doc, "isShard_s", dr);
         }
 
         if(parts[0].equals("wt")) {
           String dr = URLDecoder.decode(parts[1], Charset.defaultCharset());
-          doc.addField("wt_s", dr);
+          setFieldIfUnset(doc, "wt_s", dr);
         }
 
         if(parts[0].equals("facet")) {
           String dr = URLDecoder.decode(parts[1], Charset.defaultCharset());
-          doc.addField("facet_s", dr);
+          setFieldIfUnset(doc, "facet_s", dr);
         }
 
         if(parts[0].equals("shards.purpose")) {
@@ -533,19 +553,9 @@ public class SolrLogPostTool {
       //Special params used to determine what stage a query is.
       //So we populate with defaults.
       //The absence of the distrib params means its a distributed query.
-
-
-      if(doc.getField("distrib_s") == null) {
-        doc.addField("distrib_s", "true");
-      }
-
-      if(doc.getField("shards_s") == null) {
-        doc.addField("shards_s", "false");
-      }
-
-      if(doc.getField("ids_s") == null) {
-        doc.addField("ids_s", "false");
-      }
+      setFieldIfUnset(doc, "distrib_s", "true");
+      setFieldIfUnset(doc, "shards_s", "false");
+      setFieldIfUnset(doc, "ids_s", "false");
     }
 
     private boolean isRTGRequest(SolrInputDocument doc) {
diff --git a/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java b/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java
index 10f7b8c..c65f5f1 100644
--- a/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java
+++ b/solr/core/src/test/org/apache/solr/util/SolrLogPostToolTest.java
@@ -75,6 +75,24 @@ public class SolrLogPostToolTest extends SolrTestCaseJ4 {
     assertEquals("REFINE_FACETS", purposes[1].toString());
   }
 
+  // Requests which have multiple copies of the same param should be parsed so that only the first param value is
+  // indexed, since the log schema expects many of these to be single-valued fields and will throw errors if multiple
+  // values are received.
+  @Test
+  public void testRecordsFirstInstanceOfSingleValuedParams() throws Exception {
+    final String record = "2019-12-09 15:05:01.931 INFO  (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1]  webapp=/solr path=/select params={q=*:*&q=inStock:true&_=1575835181759&shards.purpose=36&isShard=true&wt=javabin&wt=xml&distrib=false} hits=234868 status=0 QTime=8\n";
+
+    List<SolrInputDocument> docs = readDocs(record);
+    assertEquals(docs.size(), 1);
+    SolrInputDocument doc = docs.get(0);
+
+    assertEquals(doc.getFieldValues("q_s").size(), 1);
+    assertEquals(doc.getFieldValue("q_s"), "*:*");
+
+    assertEquals(doc.getFieldValues("wt_s").size(), 1);
+    assertEquals(doc.getFieldValue("wt_s"), "javabin");
+  }
+
   @Test
   public void testRTGRecord() throws Exception {
     final String record = "2020-03-19 20:00:30.845 INFO  (qtp1635378213-20354) [c:logs4 s:shard8 r:core_node63 x:logs4_shard8_replica_n60] o.a.s.c.S.Request [logs4_shard8_replica_n60]  webapp=/solr path=/get params={qt=/get&_stateVer_=logs4:104&ids=id1&ids=id2&ids=id3&wt=javabin&version=2} status=0 QTime=61";


[lucene-solr] 15/47: SOLR-13289: Rename minExactHits to minExactCount (#1511)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit ea36cb595f660cfcaf7357eab3acee3fe1db2ffa
Author: Tomas Fernandez Lobbe <tf...@apache.org>
AuthorDate: Thu May 21 16:42:37 2020 -0700

    SOLR-13289: Rename minExactHits to minExactCount (#1511)
---
 solr/CHANGES.txt                                   |  2 +-
 .../solr/handler/component/QueryComponent.java     | 12 ++++----
 .../java/org/apache/solr/search/QueryCommand.java  | 10 +++----
 .../org/apache/solr/search/QueryResultKey.java     | 10 +++----
 .../org/apache/solr/search/SolrIndexSearcher.java  |  6 ++--
 .../org/apache/solr/TestDistributedSearch.java     | 16 +++++------
 .../test/org/apache/solr/TestGroupingSearch.java   |  4 +--
 .../org/apache/solr/core/QueryResultKeyTest.java   |  2 +-
 .../test/org/apache/solr/request/TestFaceting.java |  6 ++--
 .../apache/solr/search/SolrIndexSearcherTest.java  | 32 +++++++++++-----------
 .../solr/search/TestCollapseQParserPlugin.java     |  4 +--
 .../apache/solr/common/params/CommonParams.java    |  4 +--
 .../solr/common/params/CommonParamsTest.java       |  2 +-
 13 files changed, 55 insertions(+), 55 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 1e0fa23..63193d7 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -133,7 +133,7 @@ Optimizations
 
 * LUCENE-7788: fail precommit on unparameterised log messages and examine for wasted work/objects (Erick Erickson)
 
-* SOLR-13289: When the "minExactHits" parameter is provided in queries and its value is lower than the number of hits,
+* SOLR-13289: When the "minExactCount" parameter is provided in queries and its value is lower than the number of hits,
   Solr can speedup the query resolution by using the Block-Max WAND algorithm (see LUCENE-8135). When doing this, the
   value of matching documents in the response (numFound) will be an approximation.
   (Ishan Chattopadhyaya, Munendra S N, Tomás Fernández Löbbe, David Smiley)
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
index c113bef..752cc4d 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
@@ -366,7 +366,7 @@ public class QueryComponent extends SearchComponent
 
     QueryCommand cmd = rb.createQueryCommand();
     cmd.setTimeAllowed(timeAllowed);
-    cmd.setMinExactHits(getMinExactHits(params));
+    cmd.setMinExactCount(getMinExactCount(params));
 
     req.getContext().put(SolrIndexSearcher.STATS_SOURCE, statsCache.get(req));
     
@@ -403,12 +403,12 @@ public class QueryComponent extends SearchComponent
     doProcessUngroupedSearch(rb, cmd, result);
   }
 
-  private int getMinExactHits(SolrParams params) {
-    long minExactHits = params.getLong(CommonParams.MIN_EXACT_HITS, Integer.MAX_VALUE);
-    if (minExactHits < 0 || minExactHits > Integer.MAX_VALUE) {
-      minExactHits = Integer.MAX_VALUE;
+  private int getMinExactCount(SolrParams params) {
+    long minExactCount = params.getLong(CommonParams.MIN_EXACT_COUNT, Integer.MAX_VALUE);
+    if (minExactCount < 0 || minExactCount > Integer.MAX_VALUE) {
+      minExactCount = Integer.MAX_VALUE;
     }
-    return (int)minExactHits;
+    return (int)minExactCount;
   }
 
   protected void doFieldSortValues(ResponseBuilder rb, SolrIndexSearcher searcher) throws IOException
diff --git a/solr/core/src/java/org/apache/solr/search/QueryCommand.java b/solr/core/src/java/org/apache/solr/search/QueryCommand.java
index a272878..e0b4256 100755
--- a/solr/core/src/java/org/apache/solr/search/QueryCommand.java
+++ b/solr/core/src/java/org/apache/solr/search/QueryCommand.java
@@ -37,7 +37,7 @@ public class QueryCommand {
   private int supersetMaxDoc;
   private int flags;
   private long timeAllowed = -1;
-  private int minExactHits = Integer.MAX_VALUE;
+  private int minExactCount = Integer.MAX_VALUE;
   private CursorMark cursorMark;
   
   public CursorMark getCursorMark() {
@@ -184,12 +184,12 @@ public class QueryCommand {
     return this;
   }
 
-  public int getMinExactHits() {
-    return minExactHits;
+  public int getMinExactCount() {
+    return minExactCount;
   }
 
-  public QueryCommand setMinExactHits(int hits) {
-    this.minExactHits = hits;
+  public QueryCommand setMinExactCount(int count) {
+    this.minExactCount = count;
     return this;
   }
   
diff --git a/solr/core/src/java/org/apache/solr/search/QueryResultKey.java b/solr/core/src/java/org/apache/solr/search/QueryResultKey.java
index 23374e8..eba36ae 100644
--- a/solr/core/src/java/org/apache/solr/search/QueryResultKey.java
+++ b/solr/core/src/java/org/apache/solr/search/QueryResultKey.java
@@ -37,7 +37,7 @@ public final class QueryResultKey implements Accountable {
   final SortField[] sfields;
   final List<Query> filters;
   final int nc_flags;  // non-comparable flags... ignored by hashCode and equals
-  final int minExactHits;
+  final int minExactCount;
 
   private final int hc;  // cached hashCode
   private final long ramBytesUsed; // cached
@@ -48,12 +48,12 @@ public final class QueryResultKey implements Accountable {
     this(query, filters, sort, nc_flags, Integer.MAX_VALUE);
   }
 
-  public QueryResultKey(Query query, List<Query> filters, Sort sort, int nc_flags, int minExactHits) {
+  public QueryResultKey(Query query, List<Query> filters, Sort sort, int nc_flags, int minExactCount) {
     this.query = query;
     this.sort = sort;
     this.filters = filters;
     this.nc_flags = nc_flags;
-    this.minExactHits = minExactHits;
+    this.minExactCount = minExactCount;
 
     int h = query.hashCode();
 
@@ -70,7 +70,7 @@ public final class QueryResultKey implements Accountable {
       h = h*29 + sf.hashCode();
       ramSfields += BASE_SF_RAM_BYTES_USED + RamUsageEstimator.sizeOfObject(sf.getField());
     }
-    h = h*31 + minExactHits;
+    h = h*31 + minExactCount;
 
     hc = h;
 
@@ -102,7 +102,7 @@ public final class QueryResultKey implements Accountable {
     if (this.sfields.length != other.sfields.length) return false;
     if (!this.query.equals(other.query)) return false;
     if (!unorderedCompare(this.filters, other.filters)) return false;
-    if (this.minExactHits != other.minExactHits) return false;
+    if (this.minExactCount != other.minExactCount) return false;
 
     for (int i=0; i<sfields.length; i++) {
       SortField sf1 = this.sfields[i];
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index af968d6..a779bd8 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -1308,7 +1308,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
         && (flags & (NO_CHECK_QCACHE | NO_SET_QCACHE)) != ((NO_CHECK_QCACHE | NO_SET_QCACHE))) {
       // all of the current flags can be reused during warming,
       // so set all of them on the cache key.
-      key = new QueryResultKey(q, cmd.getFilterList(), cmd.getSort(), flags, cmd.getMinExactHits());
+      key = new QueryResultKey(q, cmd.getFilterList(), cmd.getSort(), flags, cmd.getMinExactCount());
       if ((flags & NO_CHECK_QCACHE) == 0) {
         superset = queryResultCache.get(key);
 
@@ -1485,7 +1485,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
    *          The Command whose properties should determine the type of TopDocsCollector to use.
    */
   private TopDocsCollector buildTopDocsCollector(int len, QueryCommand cmd) throws IOException {
-    int minNumFound = cmd.getMinExactHits();
+    int minNumFound = cmd.getMinExactCount();
     Query q = cmd.getQuery();
     if (q instanceof RankQuery) {
       RankQuery rq = (RankQuery) q;
@@ -1628,7 +1628,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
 
     boolean needScores = (cmd.getFlags() & GET_SCORES) != 0;
     int maxDoc = maxDoc();
-    cmd.setMinExactHits(Integer.MAX_VALUE);// We need the full DocSet
+    cmd.setMinExactCount(Integer.MAX_VALUE);// We need the full DocSet
 
     ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList());
     final Query query = QueryUtils.combineQueryAndFilter(QueryUtils.makeQueryable(cmd.getQuery()), pf.filter);
diff --git a/solr/core/src/test/org/apache/solr/TestDistributedSearch.java b/solr/core/src/test/org/apache/solr/TestDistributedSearch.java
index fb62107..a8ce5d6 100644
--- a/solr/core/src/test/org/apache/solr/TestDistributedSearch.java
+++ b/solr/core/src/test/org/apache/solr/TestDistributedSearch.java
@@ -211,7 +211,7 @@ public class TestDistributedSearch extends BaseDistributedSearchTestCase {
     query("q","*:*", "sort","n_tl1 desc");
     
     handle.put("maxScore", SKIPVAL);
-    testMinExactHits();
+    testMinExactCount();
     
     query("q","{!func}"+i1);// does not expect maxScore. So if it comes ,ignore it. JavaBinCodec.writeSolrDocumentList()
     //is agnostic of request params.
@@ -1090,13 +1090,13 @@ public class TestDistributedSearch extends BaseDistributedSearchTestCase {
           "stats.facet", fieldName);
   }
 
-  private void testMinExactHits() throws Exception {
-    assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_HITS, "200", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc");
-    assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_HITS, "-1", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc");
-    assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_HITS, "1", CommonParams.ROWS, "200", CommonParams.SORT, "score desc, id asc");
-    assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", "facet", "true", "facet.field", s1, CommonParams.MIN_EXACT_HITS,"1", CommonParams.ROWS, "200", CommonParams.SORT, "score desc, id asc");
-    assertIsExactHitCount("q","{!cache=false}id:1", CommonParams.MIN_EXACT_HITS,"1", CommonParams.ROWS, "1");
-    assertApproximatedHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_HITS,"2", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc");
+  private void testMinExactCount() throws Exception {
+    assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT, "200", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc");
+    assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT, "-1", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc");
+    assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT, "1", CommonParams.ROWS, "200", CommonParams.SORT, "score desc, id asc");
+    assertIsExactHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", "facet", "true", "facet.field", s1, CommonParams.MIN_EXACT_COUNT,"1", CommonParams.ROWS, "200", CommonParams.SORT, "score desc, id asc");
+    assertIsExactHitCount("q","{!cache=false}id:1", CommonParams.MIN_EXACT_COUNT,"1", CommonParams.ROWS, "1");
+    assertApproximatedHitCount("q","{!cache=false}dog OR men OR cow OR country OR dumpty", CommonParams.MIN_EXACT_COUNT,"2", CommonParams.ROWS, "2", CommonParams.SORT, "score desc, id asc");
   }
   
   private void assertIsExactHitCount(Object... requestParams) throws Exception {
diff --git a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
index 8bec510..a7ff5a5 100644
--- a/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
+++ b/solr/core/src/test/org/apache/solr/TestGroupingSearch.java
@@ -950,7 +950,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
     }
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("q", FOO_STRING_FIELD + ":Book1");
-    assertQ(req(params, CommonParams.MIN_EXACT_HITS, "2", CommonParams.ROWS, "2")
+    assertQ(req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2")
         ,"/response/result[@numFoundExact='false']"
     );
     params.set("group", true);
@@ -959,7 +959,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
         ,"/response/lst[@name='grouped']/lst[@name='"+FOO_STRING_FIELD+"']/arr[@name='groups']/lst[1]/result[@numFoundExact='true']"
     );
     
-    assertQ(req(params, CommonParams.MIN_EXACT_HITS, "2", CommonParams.ROWS, "2")
+    assertQ(req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2")
         ,"/response/lst[@name='grouped']/lst[@name='"+FOO_STRING_FIELD+"']/arr[@name='groups']/lst[1]/result[@numFoundExact='true']"
     );
     
diff --git a/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java b/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java
index 775ae5e..b7147ee 100644
--- a/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java
+++ b/solr/core/src/test/org/apache/solr/core/QueryResultKeyTest.java
@@ -137,7 +137,7 @@ public class QueryResultKeyTest extends SolrTestCaseJ4 {
     assert minIters <= iter;
   }
   
-  public void testMinExactHits() {
+  public void testMinExactCount() {
     int[] nums = smallArrayOfRandomNumbers();
     final Query base = new FlatHashTermQuery("base");
     assertKeyEquals(new QueryResultKey(base, buildFiltersFromNumbers(nums), null, 0, 10),
diff --git a/solr/core/src/test/org/apache/solr/request/TestFaceting.java b/solr/core/src/test/org/apache/solr/request/TestFaceting.java
index 81d4d71..0e9284e 100644
--- a/solr/core/src/test/org/apache/solr/request/TestFaceting.java
+++ b/solr/core/src/test/org/apache/solr/request/TestFaceting.java
@@ -935,7 +935,7 @@ public class TestFaceting extends SolrTestCaseJ4 {
   }
   
   @Test
-  public void testFacetCountsWithMinExactHits() throws Exception {
+  public void testFacetCountsWithMinExactCount() throws Exception {
     final int NUM_DOCS = 20;
     for (int i = 0; i < NUM_DOCS ; i++) {
       assertU(adoc("id", String.valueOf(i), "title_ws", "Book1"));
@@ -950,8 +950,8 @@ public class TestFaceting extends SolrTestCaseJ4 {
         ,"//*[@numFoundExact='true']"
         ,"//*[@numFound='" + NUM_DOCS + "']");
     
-    // It doesn't matter if we request minExactHits, when requesting facets, the numFound value is precise
-    assertQ(req(params, CommonParams.MIN_EXACT_HITS, "2", CommonParams.ROWS, "2"),
+    // It doesn't matter if we request minExactCount, when requesting facets, the numFound value is precise
+    assertQ(req(params, CommonParams.MIN_EXACT_COUNT, "2", CommonParams.ROWS, "2"),
         "//lst[@name='facet_fields']/lst[@name='title_ws']/int[1][@name='Book1'][.='20']"
         ,"//*[@numFoundExact='true']"
         ,"//*[@numFound='" + NUM_DOCS + "']");
diff --git a/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java b/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java
index e670133..d1f557e 100644
--- a/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java
+++ b/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java
@@ -65,27 +65,27 @@ public class SolrIndexSearcherTest extends SolrTestCaseJ4 {
     super.setUp();
   }
   
-  public void testMinExactHitsLongValue() {
+  public void testMinExactCountLongValue() {
     assertQ("test query on empty index",
         req("q", "field1_s:foo", 
-            "minExactHits", Long.toString(10L * Integer.MAX_VALUE),
+            "minExactCount", Long.toString(10L * Integer.MAX_VALUE),
             "rows", "2")
         ,"//*[@numFoundExact='true']"
         ,"//*[@numFound='" + NUM_DOCS + "']"
         );
   }
   
-  public void testMinExactHits() {
-    assertQ("minExactHits is lower than numFound,should produce approximated results",
+  public void testMinExactCount() {
+    assertQ("minExactCount is lower than numFound,should produce approximated results",
             req("q", "field1_s:foo", 
-                "minExactHits", "2",
+                "minExactCount", "2",
                 "rows", "2")
             ,"//*[@numFoundExact='false']"
             ,"//*[@numFound<='" + NUM_DOCS + "']"
             );
-    assertQ("minExactHits is higher than numFound,should produce exact results",
+    assertQ("minExactCount is higher than numFound,should produce exact results",
         req("q", "field1_s:foo", 
-            "minExactHits", "200",
+            "minExactCount", "200",
             "rows", "2")
         ,"//*[@numFoundExact='true']"
         ,"//*[@numFound='" + NUM_DOCS + "']"
@@ -108,7 +108,7 @@ public class SolrIndexSearcherTest extends SolrTestCaseJ4 {
     return qr;
   }
   
-  public void testLowMinExactHitsGeneratesApproximation() throws IOException {
+  public void testLowMinExactCountGeneratesApproximation() throws IOException {
     h.getCore().withSearcher(searcher -> {
       QueryCommand cmd = createBasicQueryCommand(NUM_DOCS / 2, 10, "field1_s", "foo");
       assertMatchesGreaterThan(NUM_DOCS, searcher, cmd);
@@ -122,7 +122,7 @@ public class SolrIndexSearcherTest extends SolrTestCaseJ4 {
     });
   }
 
-  public void testHighMinExactHitsGeneratesExactCount() throws IOException {
+  public void testHighMinExactCountGeneratesExactCount() throws IOException {
     h.getCore().withSearcher(searcher -> {
       QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 10, "field1_s", "foo");
       assertMatchesEqual(NUM_DOCS, searcher, cmd);
@@ -138,7 +138,7 @@ public class SolrIndexSearcherTest extends SolrTestCaseJ4 {
 
   
   
-  public void testLowMinExactHitsWithQueryResultCache() throws IOException {
+  public void testLowMinExactCountWithQueryResultCache() throws IOException {
     h.getCore().withSearcher(searcher -> {
       QueryCommand cmd = createBasicQueryCommand(NUM_DOCS / 2, 10, "field1_s", "foo");
       cmd.clearFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE);
@@ -148,7 +148,7 @@ public class SolrIndexSearcherTest extends SolrTestCaseJ4 {
     });
   }
   
-  public void testHighMinExactHitsWithQueryResultCache() throws IOException {
+  public void testHighMinExactCountWithQueryResultCache() throws IOException {
     h.getCore().withSearcher(searcher -> {
       QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 2, "field1_s", "foo");
       cmd.clearFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE);
@@ -158,7 +158,7 @@ public class SolrIndexSearcherTest extends SolrTestCaseJ4 {
     });
   }
   
-  public void testMinExactHitsMoreRows() throws IOException {
+  public void testMinExactCountMoreRows() throws IOException {
     h.getCore().withSearcher(searcher -> {
       QueryCommand cmd = createBasicQueryCommand(2, NUM_DOCS, "field1_s", "foo");
       assertMatchesEqual(NUM_DOCS, searcher, cmd);
@@ -166,7 +166,7 @@ public class SolrIndexSearcherTest extends SolrTestCaseJ4 {
     });
   }
   
-  public void testMinExactHitsMatchWithDocSet() throws IOException {
+  public void testMinExactCountMatchWithDocSet() throws IOException {
     h.getCore().withSearcher(searcher -> {
       QueryCommand cmd = createBasicQueryCommand(2, 2, "field1_s", "foo");
       assertMatchesGreaterThan(NUM_DOCS, searcher, cmd);
@@ -177,7 +177,7 @@ public class SolrIndexSearcherTest extends SolrTestCaseJ4 {
     });
   }
   
-  public void testMinExactHitsWithMaxScoreRequested() throws IOException {
+  public void testMinExactCountWithMaxScoreRequested() throws IOException {
     h.getCore().withSearcher(searcher -> {
       QueryCommand cmd = createBasicQueryCommand(2, 2, "field1_s", "foo");
       cmd.setFlags(SolrIndexSearcher.GET_SCORES);
@@ -248,9 +248,9 @@ public class SolrIndexSearcherTest extends SolrTestCaseJ4 {
     });
   }
 
-  private QueryCommand createBasicQueryCommand(int minExactHits, int length, String field, String q) {
+  private QueryCommand createBasicQueryCommand(int minExactCount, int length, String field, String q) {
     QueryCommand cmd = new QueryCommand();
-    cmd.setMinExactHits(minExactHits);
+    cmd.setMinExactCount(minExactCount);
     cmd.setLen(length);
     cmd.setFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE);
     cmd.setQuery(new TermQuery(new Term(field, q)));
diff --git a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
index 2c6f9c4..177242a 100644
--- a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
@@ -1041,7 +1041,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
   }
   
   @Test
-  public void testMinExactHitsDisabledByCollapse() throws Exception {
+  public void testMinExactCountDisabledByCollapse() throws Exception {
     int numDocs = 10;
     String collapseFieldInt = "field_ti_dv";
     String collapseFieldFloat = "field_tf_dv";
@@ -1060,7 +1060,7 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
       assertQ(req(
           "q", "{!cache=false}field_s:1",
           "rows", "1",
-          "minExactHits", "1",
+          "minExactCount", "1",
           // this collapse will end up matching all docs
           "fq", "{!collapse field=" + collapseField + " nullPolicy=expand}"// nullPolicy needed due to a bug when val=0
           ),"//*[@numFoundExact='true']"
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
index 2f88c96..bc1ed09 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
@@ -164,10 +164,10 @@ public interface CommonParams {
   String TIME_ALLOWED = "timeAllowed";
 
   /**
-   * The number of hits that need to be counted accurately. If more than {@link #MIN_EXACT_HITS} documents
+   * The number of hits that need to be counted accurately. If more than {@link #MIN_EXACT_COUNT} documents
    * match a query, then the value in "numFound" may be an estimate to speedup search.
    */
-  String MIN_EXACT_HITS = "minExactHits";
+  String MIN_EXACT_COUNT = "minExactCount";
   
   /** 'true' if the header should include the handler name */
   String HEADER_ECHO_HANDLER = "echoHandler";
diff --git a/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java b/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java
index 24a09d6..c384637 100755
--- a/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java
+++ b/solr/solrj/src/test/org/apache/solr/common/params/CommonParamsTest.java
@@ -34,5 +34,5 @@ public class CommonParamsTest extends SolrTestCase
 
   public void testPreferLocalShards() { assertEquals("preferLocalShards", CommonParams.PREFER_LOCAL_SHARDS); }
   
-  public void testMinExactHits() { assertEquals("minExactHits", CommonParams.MIN_EXACT_HITS); }
+  public void testMinExactCount() { assertEquals("minExactCount", CommonParams.MIN_EXACT_COUNT); }
 }
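
As an illustration (not part of the committed patch): after this rename the request parameter is `minExactCount`. A minimal sketch of a query that lets Solr stop counting exactly once at least 100 hits have been collected, assuming a collection named `techproducts`:

[source,bash]
----
# If more than 100 documents match, numFound may be an approximation and the
# response will report numFoundExact=false.
curl "http://localhost:8983/solr/techproducts/select?q=dog+OR+cow+OR+country&minExactCount=100&rows=10"
----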


[lucene-solr] 30/47: SOLR-14498: BlockCache gets stuck not accepting new stores fixing checksums

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit cebb4417d937e40b586c9a8da59dbaffcb4a1d3a
Author: Erick Erickson <Er...@gmail.com>
AuthorDate: Wed May 27 20:56:13 2020 -0400

    SOLR-14498: BlockCache gets stuck not accepting new stores fixing checksums
---
 solr/licenses/caffeine-2.8.0.jar.sha1 | 1 -
 solr/licenses/caffeine-2.8.4.jar.sha1 | 1 +
 2 files changed, 1 insertion(+), 1 deletion(-)

diff --git a/solr/licenses/caffeine-2.8.0.jar.sha1 b/solr/licenses/caffeine-2.8.0.jar.sha1
deleted file mode 100644
index ce291c4..0000000
--- a/solr/licenses/caffeine-2.8.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6000774d7f8412ced005a704188ced78beeed2bb
diff --git a/solr/licenses/caffeine-2.8.4.jar.sha1 b/solr/licenses/caffeine-2.8.4.jar.sha1
new file mode 100644
index 0000000..813e00d
--- /dev/null
+++ b/solr/licenses/caffeine-2.8.4.jar.sha1
@@ -0,0 +1 @@
+e5730b11981406faa28e0912405a0ce7c2d0f377


[lucene-solr] 04/47: SOLR-14476: Add percentiles and standard deviation aggregations to stats, facet and timeseries Streaming Expressions

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit dd0864259f6e6a73c0130012ed53d741465a33c5
Author: Joel Bernstein <jb...@apache.org>
AuthorDate: Mon May 18 16:22:56 2020 -0400

    SOLR-14476: Add percentiles and standard deviation aggregations to stats, facet and timeseries Streaming Expressions
---
 .../org/apache/solr/handler/sql/SolrTable.java     |   2 +-
 .../org/apache/solr/handler/TestSQLHandler.java    |   3 +-
 .../java/org/apache/solr/client/solrj/io/Lang.java |   4 +
 .../solr/client/solrj/io/stream/FacetStream.java   |  73 ++++--
 .../solr/client/solrj/io/stream/StatsStream.java   | 286 ++++++++++----------
 .../client/solrj/io/stream/TimeSeriesStream.java   |   8 +-
 .../solrj/io/stream/metrics/PercentileMetric.java  |  84 ++++++
 .../client/solrj/io/stream/metrics/StdMetric.java  |  93 +++++++
 .../org/apache/solr/client/solrj/io/TestLang.java  |   2 +-
 .../solrj/io/stream/StreamExpressionTest.java      | 289 +++++++++++++++++++--
 10 files changed, 663 insertions(+), 181 deletions(-)
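
As an illustration (not part of the committed patch): with `per` and `std` registered as metrics in `Lang`, they can be used alongside `count`, `sum`, etc. in `stats`, `facet` and `timeseries` expressions. A minimal sketch sent to the `/stream` handler, assuming a collection `logs` with a numeric field `qtime_i`; the `per(field, percentile)` argument order is an assumption based on the new `PercentileMetric`:

[source,bash]
----
# Median (50th percentile) and standard deviation of qtime_i per collection_s bucket.
# Collection and field names, and the per()/std() signatures, are assumptions here.
curl --data-urlencode 'expr=facet(logs, q="*:*", buckets="collection_s",
                                  bucketSorts="count(*) desc", rows=10,
                                  count(*), per(qtime_i, 50), std(qtime_i))' \
     http://localhost:8983/solr/logs/stream
----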

diff --git a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
index 66ee312..61b83a7 100644
--- a/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
+++ b/solr/core/src/java/org/apache/solr/handler/sql/SolrTable.java
@@ -799,7 +799,7 @@ class SolrTable extends AbstractQueryableTable implements TranslatableTable {
                                   String collection,
                                   String query,
                                   List<Pair<String, String>> metricPairs,
-                                  List<Map.Entry<String, Class>> fields) {
+                                  List<Map.Entry<String, Class>> fields) throws IOException {
 
 
     Map<String, Class> fmap = new HashMap();
diff --git a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
index e858c2f..b20bec5 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestSQLHandler.java
@@ -1439,8 +1439,7 @@ public class TestSQLHandler extends SolrCloudTestCase {
     // Test zero hits
     sParams = mapParams(CommonParams.QT, "/sql",
         "stmt", "select count(*), sum(a_i), min(a_i), max(a_i), cast(avg(1.0 * a_i) as float), sum(a_f), " +
-            "min(a_f), max(a_f), avg(a_f) from collection1 where a_s = 'blah'");
-
+         "min(a_f), max(a_f), avg(a_f) from collection1 where a_s = 'blah'");
 
     tuples = getTuples(sParams, baseUrl);
 
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java
index 05ba98f..e2008be 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java
@@ -28,6 +28,8 @@ import org.apache.solr.client.solrj.io.stream.metrics.CountMetric;
 import org.apache.solr.client.solrj.io.stream.metrics.MaxMetric;
 import org.apache.solr.client.solrj.io.stream.metrics.MeanMetric;
 import org.apache.solr.client.solrj.io.stream.metrics.MinMetric;
+import org.apache.solr.client.solrj.io.stream.metrics.PercentileMetric;
+import org.apache.solr.client.solrj.io.stream.metrics.StdMetric;
 import org.apache.solr.client.solrj.io.stream.metrics.SumMetric;
 
 public class Lang {
@@ -103,6 +105,8 @@ public class Lang {
         .withFunctionName("max", MaxMetric.class)
         .withFunctionName("avg", MeanMetric.class)
         .withFunctionName("sum", SumMetric.class)
+        .withFunctionName("per", PercentileMetric.class)
+        .withFunctionName("std", StdMetric.class)
         .withFunctionName("count", CountMetric.class)
 
             // tuple manipulation operations
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java
index b2b2809..6e96cfd 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java
@@ -349,18 +349,24 @@ public class FacetStream extends TupleStream implements Expressible  {
 
   private FieldComparator[] parseBucketSorts(String bucketSortString, Bucket[] buckets) throws IOException {
 
-    String[] sorts = bucketSortString.split(",");
+    String[] sorts = parseSorts(bucketSortString);
+
     FieldComparator[] comps = new FieldComparator[sorts.length];
     for(int i=0; i<sorts.length; i++) {
       String s = sorts[i];
 
-      String[] spec = s.trim().split("\\s+"); //This should take into account spaces in the sort spec.
-      
-      if(2 != spec.length){
-        throw new IOException(String.format(Locale.ROOT,"invalid expression - bad bucketSort '%s'. Expected form 'field order'",bucketSortString));
+      String fieldName = null;
+      String order = null;
+
+      if(s.endsWith("asc") || s.endsWith("ASC")) {
+        order = "asc";
+        fieldName = s.substring(0, s.length()-3).trim().replace(" ", "");
+      } else if(s.endsWith("desc") || s.endsWith("DESC")) {
+        order = "desc";
+        fieldName = s.substring(0, s.length()-4).trim().replace(" ", "");
+      } else {
+        throw new IOException(String.format(Locale.ROOT,"invalid expression - bad bucketSort '%s'.",bucketSortString));
       }
-      String fieldName = spec[0].trim();
-      String order = spec[1].trim();
             
       comps[i] = new FieldComparator(fieldName, order.equalsIgnoreCase("asc") ? ComparatorOrder.ASCENDING : ComparatorOrder.DESCENDING);
     }
@@ -368,6 +374,34 @@ public class FacetStream extends TupleStream implements Expressible  {
     return comps;
   }
 
+  private String[] parseSorts(String sortString) {
+    List<String> sorts = new ArrayList();
+    boolean inParam = false;
+    StringBuilder buff = new StringBuilder();
+    for(int i=0; i<sortString.length(); i++) {
+      char c = sortString.charAt(i);
+      if(c == '(') {
+        inParam=true;
+        buff.append(c);
+      } else if (c == ')') {
+        inParam = false;
+        buff.append(c);
+      } else if (c == ',' && !inParam) {
+        sorts.add(buff.toString().trim());
+        buff = new StringBuilder();
+      } else {
+        buff.append(c);
+      }
+    }
+
+    if(buff.length() > 0) {
+      sorts.add(buff.toString());
+    }
+
+    return sorts.toArray(new String[sorts.size()]);
+  }
+
+
   private void init(String collection, SolrParams params, Bucket[] buckets, FieldComparator[] bucketSorts, Metric[] metrics, int rows, int offset, int bucketSizeLimit, boolean refine, String method, boolean serializeBucketSizeLimit, int overfetch, String zkHost) throws IOException {
     this.zkHost  = zkHost;
     this.params = new ModifiableSolrParams(params);
@@ -568,7 +602,7 @@ public class FacetStream extends TupleStream implements Expressible  {
 
     for(Metric metric: metrics) {
       String func = metric.getFunctionName();
-      if(!func.equals("count")) {
+      if(!func.equals("count") && !func.equals("per") && !func.equals("std")) {
         if (!json.contains(metric.getIdentifier())) {
           return false;
         }
@@ -680,18 +714,27 @@ public class FacetStream extends TupleStream implements Expressible  {
 
 
     ++level;
+    boolean comma = false;
     for(Metric metric : _metrics) {
       //Only compute the metric if it's a leaf node or if the branch level sort equals is the metric
       String facetKey = "facet_"+metricCount;
-      if(level == _buckets.length || fsort.equals(facetKey) ) {
-        String identifier = metric.getIdentifier();
-        if (!identifier.startsWith("count(")) {
-          if (metricCount > 0) {
-            buf.append(",");
+      String identifier = metric.getIdentifier();
+      if (!identifier.startsWith("count(")) {
+        if (comma) {
+          buf.append(",");
+        }
+
+        if(level == _buckets.length || fsort.equals(facetKey) ) {
+          comma = true;
+          if (identifier.startsWith("per(")) {
+            buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("per", "percentile")).append('"');
+          } else if (identifier.startsWith("std(")) {
+            buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("std", "stddev")).append('"');
+          } else {
+            buf.append('"').append(facetKey).append("\":\"").append(identifier).append('"');
           }
-          buf.append('"').append(facetKey).append("\":\"").append(identifier).append('"');
-          ++metricCount;
         }
+        ++metricCount;
       }
     }
 
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java
index c497290..8747565 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java
@@ -17,7 +17,9 @@
 package org.apache.solr.client.solrj.io.stream;
 
 import java.io.IOException;
+
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
@@ -40,67 +42,75 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParamete
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionValue;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+import org.apache.solr.client.solrj.io.stream.metrics.CountMetric;
 import org.apache.solr.client.solrj.io.stream.metrics.Metric;
 import org.apache.solr.client.solrj.request.QueryRequest;
-import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 
 /**
-* @since 6.0.0
-*/
+ * @since 6.6.0
+ */
 public class StatsStream extends TupleStream implements Expressible  {
 
   private static final long serialVersionUID = 1;
 
+
+
   private Metric[] metrics;
-  private String zkHost;
   private Tuple tuple;
+  private int index;
+  private String zkHost;
   private SolrParams params;
   private String collection;
-  private boolean done;
-  private boolean doCount;
-  private Map<String, Metric> metricMap;
   protected transient SolrClientCache cache;
   protected transient CloudSolrClient cloudSolrClient;
-  protected StreamContext streamContext;
+  private StreamContext context;
 
   public StatsStream(String zkHost,
-                     String collection,
-                     SolrParams params,
-                     Metric[] metrics) {
-    init(zkHost, collection, params, metrics);
-  }
-
-  private void init(String zkHost, String collection, SolrParams params, Metric[] metrics) {
-    this.zkHost  = zkHost;
-    this.params = params;
-    this.metrics = metrics;
-    this.collection = collection;
-    metricMap = new HashMap();
-    for(Metric metric : metrics) {
-      metricMap.put(metric.getIdentifier(), metric);
-    }
+                          String collection,
+                          SolrParams params,
+                          Metric[] metrics
+                          ) throws IOException {
+    init(collection, params, metrics, zkHost);
   }
 
   public StatsStream(StreamExpression expression, StreamFactory factory) throws IOException{
     // grab all parameters out
     String collectionName = factory.getValueOperand(expression, 0);
+
+    if(collectionName.indexOf('"') > -1) {
+      collectionName = collectionName.replaceAll("\"", "").replaceAll(" ", "");
+    }
+
     List<StreamExpressionNamedParameter> namedParams = factory.getNamedOperands(expression);
-    List<StreamExpression> metricExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Metric.class);
-    StreamExpressionNamedParameter zkHostExpression = factory.getNamedOperand(expression, "zkHost");
 
+    StreamExpressionNamedParameter zkHostExpression = factory.getNamedOperand(expression, "zkHost");
+    List<StreamExpression> metricExpressions = factory.getExpressionOperandsRepresentingTypes(expression, Expressible.class, Metric.class);
 
     // Collection Name
     if(null == collectionName){
       throw new IOException(String.format(Locale.ROOT,"invalid expression %s - collectionName expected as first operand",expression));
     }
 
+    // Construct the metrics
+    Metric[] metrics = null;
+    if(metricExpressions.size() > 0) {
+      metrics = new Metric[metricExpressions.size()];
+      for(int idx = 0; idx < metricExpressions.size(); ++idx){
+        metrics[idx] = factory.constructMetric(metricExpressions.get(idx));
+      }
+    } else {
+      metrics = new Metric[1];
+      metrics[0] = new CountMetric();
+    }
+
+    // pull out known named params
     ModifiableSolrParams params = new ModifiableSolrParams();
     for(StreamExpressionNamedParameter namedParam : namedParams){
       if(!namedParam.getName().equals("zkHost")){
-        params.set(namedParam.getName(), namedParam.getParameter().toString().trim());
+        params.add(namedParam.getName(), namedParam.getParameter().toString().trim());
       }
     }
 
@@ -115,51 +125,55 @@ public class StatsStream extends TupleStream implements Expressible  {
       if(zkHost == null) {
         zkHost = factory.getDefaultZkHost();
       }
-    }
-    else if(zkHostExpression.getParameter() instanceof StreamExpressionValue){
+    } else if(zkHostExpression.getParameter() instanceof StreamExpressionValue){
       zkHost = ((StreamExpressionValue)zkHostExpression.getParameter()).getValue();
     }
 
-    /*
-    if(null == zkHost){
-      throw new IOException(String.format(Locale.ROOT,"invalid expression %s - zkHost not found for collection '%s'",expression,collectionName));
-    }
-    */
+    // We've got all the required items
+    init(collectionName, params, metrics, zkHost);
+  }
 
-    // metrics, optional - if not provided then why are you using this?
-    Metric[] metrics = new Metric[metricExpressions.size()];
-    for(int idx = 0; idx < metricExpressions.size(); ++idx){
-      metrics[idx] = factory.constructMetric(metricExpressions.get(idx));
-    }
+  public String getCollection() {
+    return this.collection;
+  }
 
-    // We've got all the required items
-    init(zkHost, collectionName, params, metrics);
+  private void init(String collection,
+                    SolrParams params,
+                    Metric[] metrics,
+                    String zkHost) throws IOException {
+    this.zkHost  = zkHost;
+    this.collection = collection;
+    this.metrics = metrics;
+    this.params = params;
   }
 
   @Override
   public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException {
-    // functionName(collectionName, param1, param2, ..., paramN, sort="comp", sum(fieldA), avg(fieldB))
-
     // function name
     StreamExpression expression = new StreamExpression(factory.getFunctionName(this.getClass()));
-
     // collection
-    expression.addParameter(collection);
+    if(collection.indexOf(',') > -1) {
+      expression.addParameter("\""+collection+"\"");
+    } else {
+      expression.addParameter(collection);
+    }
 
     // parameters
-    ModifiableSolrParams mParams = new ModifiableSolrParams(params);
-    for (Entry<String, String[]> param : mParams.getMap().entrySet()) {
-      expression.addParameter(new StreamExpressionNamedParameter(param.getKey(), String.join(",", param.getValue())));
-    }
+    ModifiableSolrParams tmpParams = new ModifiableSolrParams(params);
 
-    // zkHost
-    expression.addParameter(new StreamExpressionNamedParameter("zkHost", zkHost));
+    for (Entry<String, String[]> param : tmpParams.getMap().entrySet()) {
+      expression.addParameter(new StreamExpressionNamedParameter(param.getKey(),
+          String.join(",", param.getValue())));
+    }
 
     // metrics
     for(Metric metric : metrics){
       expression.addParameter(metric.toExpression(factory));
     }
 
+    // zkHost
+    expression.addParameter(new StreamExpressionNamedParameter("zkHost", zkHost));
+
     return expression;
   }
 
@@ -173,45 +187,46 @@ public class StatsStream extends TupleStream implements Expressible  {
     explanation.setExpressionType(ExpressionType.STREAM_SOURCE);
     explanation.setExpression(toExpression(factory).toString());
 
+    // child is a datastore so add it at this point
     StreamExplanation child = new StreamExplanation(getStreamNodeId() + "-datastore");
-    child.setFunctionName(String.format(Locale.ROOT, "solr (worker ? of ?)"));
-      // TODO: fix this so we know the # of workers - check with Joel about a Stat's ability to be in a
-      // parallel stream.
+    child.setFunctionName(String.format(Locale.ROOT, "solr (%s)", collection));
+    // TODO: fix this so we know the # of workers - check with Joel about a StatsStream's ability to be in a
+    // parallel stream.
 
     child.setImplementingClass("Solr/Lucene");
     child.setExpressionType(ExpressionType.DATASTORE);
-    ModifiableSolrParams mParams = new ModifiableSolrParams(params);
-    child.setExpression(mParams.getMap().entrySet().stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), e.getValue())).collect(Collectors.joining(",")));
+
+    child.setExpression(params.stream().map(e -> String.format(Locale.ROOT, "%s=%s", e.getKey(), Arrays.toString(e.getValue()))).collect(Collectors.joining(",")));
+
     explanation.addChild(child);
 
     return explanation;
   }
 
   public void setStreamContext(StreamContext context) {
-    streamContext = context;
+    this.context = context;
     cache = context.getSolrClientCache();
   }
 
   public List<TupleStream> children() {
-    return new ArrayList<>();
+    return new ArrayList();
   }
 
   public void open() throws IOException {
-    ModifiableSolrParams paramsLoc = new ModifiableSolrParams(this.params);
-    addStats(paramsLoc, metrics);
-    paramsLoc.set("stats", "true");
+
+    String json = getJsonFacetString(metrics);
+
+    ModifiableSolrParams paramsLoc = new ModifiableSolrParams(params);
+    paramsLoc.set("json.facet", json);
     paramsLoc.set("rows", "0");
-    if (streamContext.isLocal()) {
-      paramsLoc.set("distrib", "false");
-    }
 
-    Map<String, List<String>> shardsMap = (Map<String, List<String>>)streamContext.get("shards");
+    Map<String, List<String>> shardsMap = (Map<String, List<String>>)context.get("shards");
     if(shardsMap == null) {
       QueryRequest request = new QueryRequest(paramsLoc, SolrRequest.METHOD.POST);
-      CloudSolrClient cloudSolrClient = cache.getCloudSolrClient(zkHost);
+      cloudSolrClient = cache.getCloudSolrClient(zkHost);
       try {
         NamedList response = cloudSolrClient.request(request, collection);
-        this.tuple = getTuple(response);
+        getTuples(response, metrics);
       } catch (Exception e) {
         throw new IOException(e);
       }
@@ -228,7 +243,7 @@ public class StatsStream extends TupleStream implements Expressible  {
       QueryRequest request = new QueryRequest(paramsLoc, SolrRequest.METHOD.POST);
       try {
         NamedList response = client.request(request);
-        this.tuple = getTuple(response);
+        getTuples(response, metrics);
       } catch (Exception e) {
         throw new IOException(e);
       }
@@ -246,117 +261,98 @@ public class StatsStream extends TupleStream implements Expressible  {
     return builder.toString();
   }
 
-
-
   public void close() throws IOException {
 
   }
 
   public Tuple read() throws IOException {
-    if(!done) {
-      done = true;
+    if(index == 0) {
+      ++index;
       return tuple;
     } else {
-      Map<String, Object> fields = new HashMap<>();
+      Map fields = new HashMap();
       fields.put("EOF", true);
-      return new Tuple(fields);
+      Tuple tuple = new Tuple(fields);
+      return tuple;
     }
   }
 
-  public StreamComparator getStreamSort() {
-    return null;
+  private String getJsonFacetString(Metric[] _metrics) {
+    StringBuilder buf = new StringBuilder();
+    appendJson(buf, _metrics);
+    return "{"+buf.toString()+"}";
   }
 
-  private void addStats(ModifiableSolrParams params, Metric[] _metrics) {
-    Map<String, List<String>> m = new HashMap<>();
+  private void appendJson(StringBuilder buf,
+                          Metric[] _metrics) {
+    
+    int metricCount = 0;
     for(Metric metric : _metrics) {
-      String metricId = metric.getIdentifier();
-      if(metricId.contains("(")) {
-        metricId = metricId.substring(0, metricId.length()-1);
-        String[] parts = metricId.split("\\(");
-        String function = parts[0];
-        String column = parts[1];
-        List<String> stats = m.get(column);
-
-        if(stats == null) {
-          stats = new ArrayList<>();
+      String identifier = metric.getIdentifier();
+      if(!identifier.startsWith("count(")) {
+        if(metricCount>0) {
+          buf.append(",");
         }
-
-        if(!column.equals("*")) {
-          m.put(column, stats);
-        }
-
-        if(function.equals("min")) {
-          stats.add("min");
-        } else if(function.equals("max")) {
-          stats.add("max");
-        } else if(function.equals("sum")) {
-          stats.add("sum");
-        } else if(function.equals("avg")) {
-          stats.add("mean");
-        } else if(function.equals("count")) {
-          this.doCount = true;
+        if(identifier.startsWith("per(")) {
+          buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("per", "percentile")).append('"');
+        } else if(identifier.startsWith("std(")) {
+          buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("std", "stddev")).append('"');
+        } else {
+          buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier).append('"');
         }
+        ++metricCount;
       }
     }
-
-    for(Entry<String, List<String>> entry : m.entrySet()) {
-      StringBuilder buf = new StringBuilder();
-      List<String> stats = entry.getValue();
-      buf.append("{!");
-
-      for(String stat : stats) {
-        buf.append(stat).append("=").append("true ");
-      }
-
-      buf.append("}").append(entry.getKey());
-      params.add("stats.field", buf.toString());
-    }
   }
 
-  private Tuple getTuple(NamedList response) {
+  private void getTuples(NamedList response,
+                         Metric[] metrics) {
 
-    Map<String, Object> map = new HashMap<>();
-    SolrDocumentList solrDocumentList = (SolrDocumentList) response.get("response");
+    this.tuple = new Tuple(new HashMap());
+    NamedList facets = (NamedList)response.get("facets");
+    fillTuple(tuple, facets, metrics);
+  }
 
-    long count = solrDocumentList.getNumFound();
+  private void fillTuple(Tuple t,
+                         NamedList nl,
+                         Metric[] _metrics) {
 
-    if(doCount) {
-      map.put("count(*)", count);
+    if(nl == null) {
+      return;
     }
 
-    if(count != 0) {
-      NamedList stats = (NamedList)response.get("stats");
-      NamedList statsFields = (NamedList)stats.get("stats_fields");
-
-      for(int i=0; i<statsFields.size(); i++) {
-        String field = statsFields.getName(i);
-        NamedList theStats = (NamedList)statsFields.getVal(i);
-        for(int s=0; s<theStats.size(); s++) {
-          addStat(map, field, theStats.getName(s), theStats.getVal(s));
+    int m = 0;
+    for(Metric metric : _metrics) {
+      String identifier = metric.getIdentifier();
+      if(!identifier.startsWith("count(")) {
+        if(nl.get("facet_"+m) != null) {
+          Object d = nl.get("facet_" + m);
+          if(d instanceof Number) {
+            if (metric.outputLong) {
+              t.put(identifier, Math.round(((Number)d).doubleValue()));
+            } else {
+              t.put(identifier, ((Number)d).doubleValue());
+            }
+          } else {
+            t.put(identifier, d);
+          }
         }
+        ++m;
+      } else {
+        long l = ((Number)nl.get("count")).longValue();
+        t.put("count(*)", l);
       }
     }
-
-    return new Tuple(map);
   }
 
   public int getCost() {
     return 0;
   }
 
-  private void addStat(Map<String, Object> map, String field, String stat, Object val) {
-    if(stat.equals("mean")) {
-      String name = "avg("+field+")";
-      Metric m = metricMap.get(name);
-      if(m.outputLong) {
-        Number num = (Number) val;
-        map.put(name, Math.round(num.doubleValue()));
-      } else {
-        map.put(name, val);
-      }
-    } else {
-      map.put(stat+"("+field+")", val);
-    }
+  @Override
+  public StreamComparator getStreamSort() {
+    return null;
   }
 }
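For context, a minimal usage sketch (not part of this patch) of how the reworked StatsStream is driven once the new std/per metrics are registered. The zkHost value, collection name, and field names are placeholders; the calls mirror the test code further down in this commit:

    import org.apache.solr.client.solrj.io.SolrClientCache;
    import org.apache.solr.client.solrj.io.Tuple;
    import org.apache.solr.client.solrj.io.stream.StatsStream;
    import org.apache.solr.client.solrj.io.stream.StreamContext;
    import org.apache.solr.client.solrj.io.stream.TupleStream;
    import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParser;
    import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
    import org.apache.solr.client.solrj.io.stream.metrics.*;

    String zkHost = "localhost:9983";                       // placeholder ZooKeeper address
    StreamFactory factory = new StreamFactory()
        .withCollectionZkHost("collection1", zkHost)        // placeholder collection name
        .withFunctionName("stats", StatsStream.class)
        .withFunctionName("sum", SumMetric.class)
        .withFunctionName("avg", MeanMetric.class)
        .withFunctionName("std", StdMetric.class)
        .withFunctionName("per", PercentileMetric.class)
        .withFunctionName("count", CountMetric.class);

    String expr = "stats(collection1, q=*:*, avg(a_i), std(a_i), per(a_i, 50), count(*))";
    TupleStream stream = factory.constructStream(StreamExpressionParser.parse(expr));
    StreamContext context = new StreamContext();
    context.setSolrClientCache(new SolrClientCache());
    stream.setStreamContext(context);
    stream.open();
    try {
      Tuple tuple = stream.read();                  // one tuple carrying all aggregates
      Double std = tuple.getDouble("std(a_i)");     // stddev computed server-side via json.facet
      Double p50 = tuple.getDouble("per(a_i,50)");  // 50th percentile via json.facet
    } finally {
      stream.close();
    }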
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java
index bccc438..ee4570d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java
@@ -366,7 +366,13 @@ public class TimeSeriesStream extends TupleStream implements Expressible  {
         if(metricCount>0) {
           buf.append(",");
         }
-        buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier).append('"');
+        if(identifier.startsWith("per(")) {
+          buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("per", "percentile")).append('"');
+        } else if(identifier.startsWith("std(")) {
+          buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier.replaceFirst("std", "stddev")).append('"');
+        } else {
+          buf.append("\"facet_").append(metricCount).append("\":\"").append(identifier).append('"');
+        }
         ++metricCount;
       }
     }
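Worth noting (an illustration, not text from the patch): both StatsStream.getJsonFacetString(...) above and this TimeSeriesStream change translate streaming-expression metric names into Solr JSON facet functions, rewriting per(...) to percentile(...) and std(...) to stddev(...). For example, the metric list sum(a_i), per(a_i,50), std(a_f), count(*) would, in StatsStream, yield a json.facet payload of roughly {"facet_0":"sum(a_i)","facet_1":"percentile(a_i,50)","facet_2":"stddev(a_f)"}; count(*) is not sent as a facet and is instead read back from the bucket's "count" value.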
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/PercentileMetric.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/PercentileMetric.java
new file mode 100644
index 0000000..fe25de4
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/PercentileMetric.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream.metrics;
+
+import java.io.IOException;
+import java.util.Locale;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class PercentileMetric extends Metric {
+  private long longMax = Long.MIN_VALUE;
+  private double doubleMax = -Double.MAX_VALUE;
+  private String columnName;
+  private int percentile;
+
+  public PercentileMetric(String columnName, int percentile){
+    init("per", columnName, percentile);
+  }
+
+  public PercentileMetric(StreamExpression expression, StreamFactory factory) throws IOException{
+    // grab all parameters out
+    String functionName = expression.getFunctionName();
+    String columnName = factory.getValueOperand(expression, 0);
+    int percentile = Integer.parseInt(factory.getValueOperand(expression, 1));
+
+    // validate expression contains only what we want.
+    if(null == columnName){
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expected %s(columnName)", expression, functionName));
+    }
+    if(2 != expression.getParameters().size()){
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - unknown operands found", expression));
+    }
+
+    init(functionName, columnName, percentile);
+  }
+
+  private void init(String functionName, String columnName, int percentile){
+    this.columnName = columnName;
+    this.percentile = percentile;
+    setFunctionName(functionName);
+    setIdentifier(functionName, "(", columnName, ","+percentile, ")");
+  }
+
+  public Number getValue() {
+    if(longMax == Long.MIN_VALUE) {
+      return doubleMax;
+    } else {
+      return longMax;
+    }
+  }
+
+  public String[] getColumns() {
+    return new String[]{columnName};
+  }
+
+  public void update(Tuple tuple) {
+
+  }
+
+  public Metric newInstance() {
+    return new PercentileMetric(columnName, percentile);
+  }
+
+  @Override
+  public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException {
+    return new StreamExpression(getFunctionName()).withParameter(columnName).withParameter(Integer.toString(percentile));
+  }
+}
\ No newline at end of file
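A small sketch (not part of the patch) of how this metric is constructed and what identifier it yields; the field name is a placeholder:

    // Hypothetical direct construction; normally the metric is built by StreamFactory
    // from the "per" function name registered in the tests below.
    PercentileMetric p50 = new PercentileMetric("a_i", 50);
    String id = p50.getIdentifier();   // "per(a_i,50)"
    // StatsStream/TimeSeriesStream rewrite this identifier to the JSON facet
    // function "percentile(a_i,50)" before sending the request.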
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/StdMetric.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/StdMetric.java
new file mode 100644
index 0000000..b4c55b9
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/metrics/StdMetric.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.stream.metrics;
+
+import java.io.IOException;
+import java.util.Locale;
+
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class StdMetric extends Metric {
+
+  private String columnName;
+  private double doubleSum;
+  private long longSum;
+  private long count;
+
+  public StdMetric(String columnName){
+    init("std", columnName, false);
+  }
+
+  public StdMetric(String columnName, boolean outputLong){
+    init("std", columnName, outputLong);
+  }
+
+  public StdMetric(StreamExpression expression, StreamFactory factory) throws IOException{
+    // grab all parameters out
+    String functionName = expression.getFunctionName();
+    String columnName = factory.getValueOperand(expression, 0);
+    String outputLong = factory.getValueOperand(expression, 1);
+
+
+    // validate expression contains only what we want.
+    if(null == columnName){
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expected %s(columnName)", expression, functionName));
+    }
+
+    boolean ol = false;
+    if(outputLong != null) {
+      ol = Boolean.parseBoolean(outputLong);
+    }
+
+    init(functionName, columnName, ol);
+  }
+
+  private void init(String functionName, String columnName, boolean outputLong){
+    this.columnName = columnName;
+    this.outputLong = outputLong;
+    setFunctionName(functionName);
+    setIdentifier(functionName, "(", columnName, ")");
+  }
+
+  public void update(Tuple tuple) {
+  }
+
+  public Metric newInstance() {
+    return new StdMetric(columnName, outputLong);
+  }
+
+  public String[] getColumns() {
+    return new String[]{columnName};
+  }
+
+  public Number getValue() {
+    return null;
+  }
+
+  @Override
+  public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException {
+    return new StreamExpression(getFunctionName()).withParameter(columnName).withParameter(Boolean.toString(outputLong));
+  }
+}
\ No newline at end of file
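Similarly, a brief sketch (illustrative only) of the optional second operand: when outputLong is true, the StatsStream.fillTuple(...) code above rounds the returned value with Math.round(...), so the tuple carries a long rather than a double:

    StdMetric stdDouble = new StdMetric("a_i");        // identifier "std(a_i)", double output
    StdMetric stdLong   = new StdMetric("a_i", true);  // same identifier, value rounded to a long
    // In a streaming expression this corresponds to std(a_i) and std(a_i, true).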
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java
index 459626e..2c6cbc1 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java
@@ -85,7 +85,7 @@ public class TestLang extends SolrTestCase {
       "getSupportPoints", "pairSort", "log10", "plist", "recip", "pivot", "ltrim", "rtrim", "export",
       "zplot", "natural", "repeat", "movingMAD", "hashRollup", "noop", "var", "stddev", "recNum", "isNull",
       "notNull", "matches", "projectToBorder", "double", "long", "parseCSV", "parseTSV", "dateTime",
-       "split", "upper", "trim", "lower", "trunc", "cosine", "dbscan"};
+       "split", "upper", "trim", "lower", "trunc", "cosine", "dbscan", "per", "std"};
 
   @Test
   public void testLang() {
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index bc8b40d..7e5da9d 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -48,6 +48,8 @@ import org.apache.solr.client.solrj.io.stream.metrics.CountMetric;
 import org.apache.solr.client.solrj.io.stream.metrics.MaxMetric;
 import org.apache.solr.client.solrj.io.stream.metrics.MeanMetric;
 import org.apache.solr.client.solrj.io.stream.metrics.MinMetric;
+import org.apache.solr.client.solrj.io.stream.metrics.PercentileMetric;
+import org.apache.solr.client.solrj.io.stream.metrics.StdMetric;
 import org.apache.solr.client.solrj.io.stream.metrics.SumMetric;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -762,7 +764,9 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     .withFunctionName("min", MinMetric.class)
     .withFunctionName("max", MaxMetric.class)
     .withFunctionName("avg", MeanMetric.class)
-    .withFunctionName("count", CountMetric.class);
+    .withFunctionName("count", CountMetric.class)
+    .withFunctionName("std", StdMetric.class)
+    .withFunctionName("per", PercentileMetric.class);
 
     StreamExpression expression;
     TupleStream stream;
@@ -771,7 +775,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     SolrClientCache cache = new SolrClientCache();
     try {
       streamContext.setSolrClientCache(cache);
-      String expr = "stats(" + COLLECTIONORALIAS + ", q=*:*, sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), count(*))";
+      String expr = "stats(" + COLLECTIONORALIAS + ", q=*:*, sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), std(a_i), std(a_f), per(a_i, 50), per(a_f, 50), count(*))";
       expression = StreamExpressionParser.parse(expr);
       stream = factory.constructStream(expression);
       stream.setStreamContext(streamContext);
@@ -792,6 +796,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       Double maxf = tuple.getDouble("max(a_f)");
       Double avgi = tuple.getDouble("avg(a_i)");
       Double avgf = tuple.getDouble("avg(a_f)");
+      Double stdi = tuple.getDouble("std(a_i)");
+      Double stdf = tuple.getDouble("std(a_f)");
+      Double peri = tuple.getDouble("per(a_i,50)");
+      Double perf = tuple.getDouble("per(a_f,50)");
       Double count = tuple.getDouble("count(*)");
 
       assertTrue(sumi.longValue() == 70);
@@ -802,11 +810,16 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       assertTrue(maxf.doubleValue() == 10.0D);
       assertTrue(avgi.doubleValue() == 7.0D);
       assertTrue(avgf.doubleValue() == 5.5D);
+      assertTrue(stdi.doubleValue() == 5.477225575051661D);
+      assertTrue(stdf.doubleValue() == 3.0276503540974917D);
+      assertTrue(peri.doubleValue() == 7.0D);
+      assertTrue(perf.doubleValue() == 5.5D);
       assertTrue(count.doubleValue() == 10);
 
+
       //Test without query
 
-      expr = "stats(" + COLLECTIONORALIAS + ", sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), count(*))";
+      expr = "stats(" + COLLECTIONORALIAS + ", sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), std(a_i), std(a_f), per(a_i, 50), per(a_f, 50), count(*))";
       expression = StreamExpressionParser.parse(expr);
       stream = factory.constructStream(expression);
       stream.setStreamContext(streamContext);
@@ -827,6 +840,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       maxf = tuple.getDouble("max(a_f)");
       avgi = tuple.getDouble("avg(a_i)");
       avgf = tuple.getDouble("avg(a_f)");
+      stdi = tuple.getDouble("std(a_i)");
+      stdf = tuple.getDouble("std(a_f)");
+      peri = tuple.getDouble("per(a_i,50)");
+      perf = tuple.getDouble("per(a_f,50)");
       count = tuple.getDouble("count(*)");
 
       assertTrue(sumi.longValue() == 70);
@@ -837,12 +854,15 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       assertTrue(maxf.doubleValue() == 10.0D);
       assertTrue(avgi.doubleValue() == 7.0D);
       assertTrue(avgf.doubleValue() == 5.5D);
+      assertTrue(stdi.doubleValue() == 5.477225575051661D);
+      assertTrue(stdf.doubleValue() == 3.0276503540974917D);
+      assertTrue(peri.doubleValue() == 7.0D);
+      assertTrue(perf.doubleValue() == 5.5D);
       assertTrue(count.doubleValue() == 10);
 
-
       //Test with shards parameter
       List<String> shardUrls = TupleStream.getShards(cluster.getZkServer().getZkAddress(), COLLECTIONORALIAS, streamContext);
-      expr = "stats(myCollection, q=*:*, sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), count(*))";
+      expr = "stats(myCollection, q=*:*, sum(a_i), sum(a_f), min(a_i), min(a_f), max(a_i), max(a_f), avg(a_i), avg(a_f), std(a_i), std(a_f), per(a_i, 50), per(a_f, 50), count(*))";
       Map<String, List<String>> shardsMap = new HashMap();
       shardsMap.put("myCollection", shardUrls);
       StreamContext context = new StreamContext();
@@ -867,6 +887,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       maxf = tuple.getDouble("max(a_f)");
       avgi = tuple.getDouble("avg(a_i)");
       avgf = tuple.getDouble("avg(a_f)");
+      stdi = tuple.getDouble("std(a_i)");
+      stdf = tuple.getDouble("std(a_f)");
+      peri = tuple.getDouble("per(a_i,50)");
+      perf = tuple.getDouble("per(a_f,50)");
       count = tuple.getDouble("count(*)");
 
       assertTrue(sumi.longValue() == 70);
@@ -877,6 +901,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       assertTrue(maxf.doubleValue() == 10.0D);
       assertTrue(avgi.doubleValue() == 7.0D);
       assertTrue(avgf.doubleValue() == 5.5D);
+      assertTrue(stdi.doubleValue() == 5.477225575051661D);
+      assertTrue(stdf.doubleValue() == 3.0276503540974917D);
+      assertTrue(peri.doubleValue() == 7.0D);
+      assertTrue(perf.doubleValue() == 5.5D);
       assertTrue(count.doubleValue() == 10);
 
       //Exercise the /stream handler
@@ -1135,6 +1163,8 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       .withFunctionName("min", MinMetric.class)
       .withFunctionName("max", MaxMetric.class)
       .withFunctionName("avg", MeanMetric.class)
+      .withFunctionName("std", StdMetric.class)
+      .withFunctionName("per", PercentileMetric.class)
       .withFunctionName("count", CountMetric.class);
 
     // Basic test
@@ -1150,6 +1180,8 @@ public class StreamExpressionTest extends SolrCloudTestCase {
               +   "min(a_i), min(a_f), "
               +   "max(a_i), max(a_f), "
               +   "avg(a_i), avg(a_f), "
+              +   "std(a_i), std(a_f),"
+              +   "per(a_i, 50), per(a_f, 50),"
               +   "count(*)"
               + ")";
 
@@ -1158,7 +1190,6 @@ public class StreamExpressionTest extends SolrCloudTestCase {
 
     assert(tuples.size() == 3);
 
-    //Test Long and Double Sums
 
     Tuple tuple = tuples.get(0);
     String bucket = tuple.getString("a_s");
@@ -1170,6 +1201,12 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     Double maxf = tuple.getDouble("max(a_f)");
     Double avgi = tuple.getDouble("avg(a_i)");
     Double avgf = tuple.getDouble("avg(a_f)");
+    Double stdi = tuple.getDouble("std(a_i)");
+    Double stdf = tuple.getDouble("std(a_f)");
+    Double peri = tuple.getDouble("per(a_i,50)");
+    Double perf = tuple.getDouble("per(a_f,50)");
+
+
     Double count = tuple.getDouble("count(*)");
 
     assertTrue(bucket.equals("hello4"));
@@ -1182,6 +1219,11 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(avgi.doubleValue() == 7.5D);
     assertTrue(avgf.doubleValue() == 5.5D);
     assertTrue(count.doubleValue() == 2);
+    assertTrue(stdi.doubleValue() == 4.949747468305833D);
+    assertTrue(stdf.doubleValue() == 2.1213203435596424D);
+    assertTrue(peri.doubleValue() == 7.5D);
+    assertTrue(perf.doubleValue() ==  5.5D);
+
 
     tuple = tuples.get(1);
     bucket = tuple.getString("a_s");
@@ -1194,6 +1236,11 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     avgi = tuple.getDouble("avg(a_i)");
     avgf = tuple.getDouble("avg(a_f)");
     count = tuple.getDouble("count(*)");
+    stdi = tuple.getDouble("std(a_i)");
+    stdf = tuple.getDouble("std(a_f)");
+    peri = tuple.getDouble("per(a_i,50)");
+    perf = tuple.getDouble("per(a_f,50)");
+
 
     assertTrue(bucket.equals("hello0"));
     assertTrue(sumi.doubleValue() == 17.0D);
@@ -1205,6 +1252,11 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(avgi.doubleValue() == 4.25D);
     assertTrue(avgf.doubleValue() == 4.5D);
     assertTrue(count.doubleValue() == 4);
+    assertTrue(stdi.doubleValue() == 6.551081335677848D);
+    assertTrue(stdf.doubleValue() == 4.041451884327381D);
+    assertTrue(peri.doubleValue() == 1.5D);
+    assertTrue(perf.doubleValue() ==  3.5D);
+
 
     tuple = tuples.get(2);
     bucket = tuple.getString("a_s");
@@ -1217,6 +1269,15 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     avgi = tuple.getDouble("avg(a_i)");
     avgf = tuple.getDouble("avg(a_f)");
     count = tuple.getDouble("count(*)");
+    stdi = tuple.getDouble("std(a_i)");
+    stdf = tuple.getDouble("std(a_f)");
+    peri = tuple.getDouble("per(a_i,50)");
+    perf = tuple.getDouble("per(a_f,50)");
+
+
+    System.out.println("STD and Per:"+stdi+":"+stdf+":"+peri+":"+perf);
+//STD and Per:4.509249752822894:2.6457513110645907:11.0:7.0
+    //assert(false);
 
     assertTrue(bucket.equals("hello3"));
     assertTrue(sumi.doubleValue() == 38.0D);
@@ -1228,6 +1289,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(avgi.doubleValue() == 9.5D);
     assertTrue(avgf.doubleValue() == 6.5D);
     assertTrue(count.doubleValue() == 4);
+    assertTrue(stdi.doubleValue() == 4.509249752822894D);
+    assertTrue(stdf.doubleValue() == 2.6457513110645907D);
+    assertTrue(peri.doubleValue() == 11.0D);
+    assertTrue(perf.doubleValue() ==  7.0D);
 
 
     //Reverse the Sort.
@@ -1244,6 +1309,8 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         +   "min(a_i), min(a_f), "
         +   "max(a_i), max(a_f), "
         +   "avg(a_i), avg(a_f), "
+        +   "std(a_i), std(a_f),"
+        +   "per(a_i, 50), per(a_f, 50),"
         +   "count(*)"
         + ")";
 
@@ -1264,6 +1331,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     avgi = tuple.getDouble("avg(a_i)");
     avgf = tuple.getDouble("avg(a_f)");
     count = tuple.getDouble("count(*)");
+    stdi = tuple.getDouble("std(a_i)");
+    stdf = tuple.getDouble("std(a_f)");
+    peri = tuple.getDouble("per(a_i,50)");
+    perf = tuple.getDouble("per(a_f,50)");
 
     assertTrue(bucket.equals("hello3"));
     assertTrue(sumi.doubleValue() == 38.0D);
@@ -1275,6 +1346,11 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(avgi.doubleValue() == 9.5D);
     assertTrue(avgf.doubleValue() == 6.5D);
     assertTrue(count.doubleValue() == 4);
+    assertTrue(stdi.doubleValue() == 4.509249752822894D);
+    assertTrue(stdf.doubleValue() == 2.6457513110645907D);
+    assertTrue(peri.doubleValue() == 11.0D);
+    assertTrue(perf.doubleValue() ==  7.0D);
+
 
     tuple = tuples.get(1);
     bucket = tuple.getString("a_s");
@@ -1287,6 +1363,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     avgi = tuple.getDouble("avg(a_i)");
     avgf = tuple.getDouble("avg(a_f)");
     count = tuple.getDouble("count(*)");
+    stdi = tuple.getDouble("std(a_i)");
+    stdf = tuple.getDouble("std(a_f)");
+    peri = tuple.getDouble("per(a_i,50)");
+    perf = tuple.getDouble("per(a_f,50)");
 
     assertTrue(bucket.equals("hello0"));
     assertTrue(sumi.doubleValue() == 17.0D);
@@ -1298,6 +1378,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(avgi.doubleValue() == 4.25D);
     assertTrue(avgf.doubleValue() == 4.5D);
     assertTrue(count.doubleValue() == 4);
+    assertTrue(stdi.doubleValue() == 6.551081335677848D);
+    assertTrue(stdf.doubleValue() == 4.041451884327381D);
+    assertTrue(peri.doubleValue() == 1.5D);
+    assertTrue(perf.doubleValue() ==  3.5D);
 
     tuple = tuples.get(2);
     bucket = tuple.getString("a_s");
@@ -1310,6 +1394,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     avgi = tuple.getDouble("avg(a_i)");
     avgf = tuple.getDouble("avg(a_f)");
     count = tuple.getDouble("count(*)");
+    stdi = tuple.getDouble("std(a_i)");
+    stdf = tuple.getDouble("std(a_f)");
+    peri = tuple.getDouble("per(a_i,50)");
+    perf = tuple.getDouble("per(a_f,50)");
 
     assertTrue(bucket.equals("hello4"));
     assertTrue(sumi.longValue() == 15);
@@ -1321,6 +1409,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(avgi.doubleValue() == 7.5D);
     assertTrue(avgf.doubleValue() == 5.5D);
     assertTrue(count.doubleValue() == 2);
+    assertTrue(stdi.doubleValue() == 4.949747468305833D);
+    assertTrue(stdf.doubleValue() == 2.1213203435596424D);
+    assertTrue(peri.doubleValue() == 7.5D);
+    assertTrue(perf.doubleValue() ==  5.5D);
 
 
     clause = "facet("
@@ -1477,6 +1569,8 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         +   "min(a_i), min(a_f), "
         +   "max(a_i), max(a_f), "
         +   "avg(a_i), avg(a_f), "
+        +   "std(a_i), std(a_f),"
+        +   "per(a_i, 50), per(a_f, 50),"
         +   "count(*)"
         + ")";
 
@@ -1497,7 +1591,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     avgi = tuple.getDouble("avg(a_i)");
     avgf = tuple.getDouble("avg(a_f)");
     count = tuple.getDouble("count(*)");
-
+    stdi = tuple.getDouble("std(a_i)");
+    stdf = tuple.getDouble("std(a_f)");
+    peri = tuple.getDouble("per(a_i,50)");
+    perf = tuple.getDouble("per(a_f,50)");
 
     assertTrue(bucket.equals("hello4"));
     assertTrue(sumi.longValue() == 15);
@@ -1509,7 +1606,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(avgi.doubleValue() == 7.5D);
     assertTrue(avgf.doubleValue() == 5.5D);
     assertTrue(count.doubleValue() == 2);
-
+    assertTrue(stdi.doubleValue() == 4.949747468305833D);
+    assertTrue(stdf.doubleValue() == 2.1213203435596424D);
+    assertTrue(peri.doubleValue() == 7.5D);
+    assertTrue(perf.doubleValue() ==  5.5D);
 
     tuple = tuples.get(1);
     bucket = tuple.getString("a_s");
@@ -1522,6 +1622,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     avgi = tuple.getDouble("avg(a_i)");
     avgf = tuple.getDouble("avg(a_f)");
     count = tuple.getDouble("count(*)");
+    stdi = tuple.getDouble("std(a_i)");
+    stdf = tuple.getDouble("std(a_f)");
+    peri = tuple.getDouble("per(a_i,50)");
+    perf = tuple.getDouble("per(a_f,50)");
 
     assertTrue(bucket.equals("hello3"));
     assertTrue(sumi.doubleValue() == 38.0D);
@@ -1533,6 +1637,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(avgi.doubleValue() == 9.5D);
     assertTrue(avgf.doubleValue() == 6.5D);
     assertTrue(count.doubleValue() == 4);
+    assertTrue(stdi.doubleValue() == 4.509249752822894D);
+    assertTrue(stdf.doubleValue() == 2.6457513110645907D);
+    assertTrue(peri.doubleValue() == 11.0D);
+    assertTrue(perf.doubleValue() ==  7.0D);
 
 
     tuple = tuples.get(2);
@@ -1546,6 +1654,10 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     avgi = tuple.getDouble("avg(a_i)");
     avgf = tuple.getDouble("avg(a_f)");
     count = tuple.getDouble("count(*)");
+    stdi = tuple.getDouble("std(a_i)");
+    stdf = tuple.getDouble("std(a_f)");
+    peri = tuple.getDouble("per(a_i,50)");
+    perf = tuple.getDouble("per(a_f,50)");
 
     assertTrue(bucket.equals("hello0"));
     assertTrue(sumi.doubleValue() == 17.0D);
@@ -1557,6 +1669,11 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(avgi.doubleValue() == 4.25D);
     assertTrue(avgf.doubleValue() == 4.5D);
     assertTrue(count.doubleValue() == 4);
+    assertTrue(stdi.doubleValue() == 6.551081335677848D);
+    assertTrue(stdf.doubleValue() == 4.041451884327381D);
+    assertTrue(peri.doubleValue() == 1.5D);
+    assertTrue(perf.doubleValue() ==  3.5D);
+
 
     //Test index sort
 
@@ -1810,14 +1927,16 @@ public class StreamExpressionTest extends SolrCloudTestCase {
       .withFunctionName("min", MinMetric.class)
       .withFunctionName("max", MaxMetric.class)
       .withFunctionName("avg", MeanMetric.class)
-      .withFunctionName("count", CountMetric.class);
+      .withFunctionName("count", CountMetric.class)
+      .withFunctionName("std", StdMetric.class)
+      .withFunctionName("per", PercentileMetric.class);
 
     // Basic test
     clause = "facet("
               +   "collection1, "
               +   "q=\"*:*\", "
               +   "buckets=\"level1_s, level2_s\", "
-              +   "bucketSorts=\"sum(a_i) desc, sum(a_i) desc)\", "
+              +   "bucketSorts=\"sum(a_i) desc, sum(a_i) desc\", "
               +   "bucketSizeLimit=100, "
               +   "sum(a_i), count(*)"
               + ")";
@@ -1897,7 +2016,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         +   "collection1, "
         +   "q=\"*:*\", "
         +   "buckets=\"level1_s, level2_s\", "
-        +   "bucketSorts=\"level1_s desc, level2_s desc)\", "
+        +   "bucketSorts=\"level1_s desc, level2_s desc\", "
         +   "bucketSizeLimit=100, "
         +   "sum(a_i), count(*)"
         + ")";
@@ -1972,6 +2091,89 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(bucket2.equals("a"));
     assertTrue(sumi.longValue() == 2);
     assertTrue(count.doubleValue() == 2);
+
+    //Add sorts for percentile
+
+    clause = "facet("
+        +   "collection1, "
+        +   "q=\"*:*\", "
+        +   "buckets=\"level1_s, level2_s\", "
+        +   "bucketSorts=\"per(a_i, 50) desc, std(a_i) desc\", "
+        +   "bucketSizeLimit=100, "
+        +   "std(a_i), per(a_i,50)"
+        + ")";
+
+    stream = factory.constructStream(clause);
+    tuples = getTuples(stream);
+
+    assert(tuples.size() == 6);
+
+    tuple = tuples.get(0);
+    bucket1 = tuple.getString("level1_s");
+    bucket2 = tuple.getString("level2_s");
+    double stdi = tuple.getDouble("std(a_i)");
+    double peri = tuple.getDouble("per(a_i,50)");
+
+    assertTrue(bucket1.equals("hello3"));
+    assertTrue(bucket2.equals("b"));
+    assertTrue(stdi  == 1.5275252316519468D);
+    assertTrue(peri == 12.0D);
+
+    tuple = tuples.get(1);
+    bucket1 = tuple.getString("level1_s");
+    bucket2 = tuple.getString("level2_s");
+    stdi = tuple.getDouble("std(a_i)");
+    peri = tuple.getDouble("per(a_i,50)");
+
+    assertTrue(bucket1.equals("hello4"));
+    assertTrue(bucket2.equals("b"));
+    assertTrue(stdi  == 0.0D);
+    assertTrue(peri == 11.0);
+
+    tuple = tuples.get(2);
+    bucket1 = tuple.getString("level1_s");
+    bucket2 = tuple.getString("level2_s");
+    stdi = tuple.getDouble("std(a_i)");
+    peri = tuple.getDouble("per(a_i,50)");
+
+    assertTrue(bucket1.equals("hello0"));
+    assertTrue(bucket2.equals("b"));
+    assertTrue(stdi  == 9.192388155425117D);
+    assertTrue(peri == 7.5D);
+
+    tuple = tuples.get(3);
+    bucket1 = tuple.getString("level1_s");
+    bucket2 = tuple.getString("level2_s");
+    stdi = tuple.getDouble("std(a_i)");
+    peri = tuple.getDouble("per(a_i,50)");
+
+    assertTrue(bucket1.equals("hello4"));
+    assertTrue(bucket2.equals("a"));
+    assertTrue(stdi  == 0.0D);
+    assertTrue(peri == 4.0D);
+
+    tuple = tuples.get(4);
+    bucket1 = tuple.getString("level1_s");
+    bucket2 = tuple.getString("level2_s");
+    stdi = tuple.getDouble("std(a_i)");
+    peri = tuple.getDouble("per(a_i,50)");
+
+    assertTrue(bucket1.equals("hello3"));
+    assertTrue(bucket2.equals("a"));
+    assertTrue(stdi  == 0.0D);
+    assertTrue(peri == 3.0D);
+
+    tuple = tuples.get(5);
+    bucket1 = tuple.getString("level1_s");
+    bucket2 = tuple.getString("level2_s");
+    stdi = tuple.getDouble("std(a_i)");
+    peri = tuple.getDouble("per(a_i,50)");
+
+    assertTrue(bucket1.equals("hello0"));
+    assertTrue(bucket2.equals("a"));
+    assertTrue(stdi  == 1.4142135623730951D);
+    assertTrue(peri == 1.0D);
+
   }
 
   @Test
@@ -2447,7 +2649,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         "end=\"2017-12-01T01:00:00.000Z\", " +
         "gap=\"+1YEAR\", " +
         "field=\"test_dt\", " +
-        "count(*), sum(price_f), max(price_f), min(price_f))";
+        "count(*), sum(price_f), max(price_f), min(price_f), avg(price_f), std(price_f), per(price_f, 50))";
     ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
     paramsLoc.set("expr", expr);
     paramsLoc.set("qt", "/stream");
@@ -2465,38 +2667,52 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(tuples.get(0).getDouble("sum(price_f)").equals(10000D));
     assertTrue(tuples.get(0).getDouble("max(price_f)").equals(100D));
     assertTrue(tuples.get(0).getDouble("min(price_f)").equals(100D));
+    assertTrue(tuples.get(0).getDouble("avg(price_f)").equals(100D));
+    assertTrue(tuples.get(0).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(0).getDouble("per(price_f,50)").equals(100D));
 
     assertTrue(tuples.get(1).get("test_dt").equals("2014-01-01T01:00:00Z"));
     assertTrue(tuples.get(1).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(1).getDouble("sum(price_f)").equals(25000D));
     assertTrue(tuples.get(1).getDouble("max(price_f)").equals(500D));
     assertTrue(tuples.get(1).getDouble("min(price_f)").equals(500D));
+    assertTrue(tuples.get(1).getDouble("avg(price_f)").equals(500D));
+    assertTrue(tuples.get(1).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(1).getDouble("per(price_f,50)").equals(500D));
 
     assertTrue(tuples.get(2).get("test_dt").equals("2015-01-01T01:00:00Z"));
     assertTrue(tuples.get(2).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(2).getDouble("sum(price_f)").equals(15000D));
     assertTrue(tuples.get(2).getDouble("max(price_f)").equals(300D));
     assertTrue(tuples.get(2).getDouble("min(price_f)").equals(300D));
+    assertTrue(tuples.get(2).getDouble("avg(price_f)").equals(300D));
+    assertTrue(tuples.get(2).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(2).getDouble("per(price_f,50)").equals(300D));
 
     assertTrue(tuples.get(3).get("test_dt").equals("2016-01-01T01:00:00Z"));
     assertTrue(tuples.get(3).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(3).getDouble("sum(price_f)").equals(20000D));
     assertTrue(tuples.get(3).getDouble("max(price_f)").equals(400D));
     assertTrue(tuples.get(3).getDouble("min(price_f)").equals(400D));
+    assertTrue(tuples.get(3).getDouble("avg(price_f)").equals(400D));
+    assertTrue(tuples.get(3).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(3).getDouble("per(price_f,50)").equals(400D));
 
     assertTrue(tuples.get(4).get("test_dt").equals("2017-01-01T01:00:00Z"));
     assertEquals((long)tuples.get(4).getLong("count(*)"), 0L);
     assertEquals(tuples.get(4).getDouble("sum(price_f)"), 0D, 0);
     assertEquals(tuples.get(4).getDouble("max(price_f)"),0D, 0);
     assertEquals(tuples.get(4).getDouble("min(price_f)"), 0D, 0);
-
+    assertTrue(tuples.get(4).getDouble("avg(price_f)").equals(0D));
+    assertTrue(tuples.get(4).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(4).getDouble("per(price_f,50)").equals(0D));
 
     expr = "timeseries("+COLLECTIONORALIAS+", q=\"*:*\", start=\"2013-01-01T01:00:00.000Z\", " +
         "end=\"2016-12-01T01:00:00.000Z\", " +
         "gap=\"+1YEAR\", " +
         "field=\"test_dt\", " +
         "format=\"yyyy\", " +
-        "count(*), sum(price_f), max(price_f), min(price_f))";
+        "count(*), sum(price_f), max(price_f), min(price_f), avg(price_f), std(price_f), per(price_f, 50))";
     paramsLoc = new ModifiableSolrParams();
     paramsLoc.set("expr", expr);
     paramsLoc.set("qt", "/stream");
@@ -2512,31 +2728,45 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(tuples.get(0).getDouble("sum(price_f)").equals(10000D));
     assertTrue(tuples.get(0).getDouble("max(price_f)").equals(100D));
     assertTrue(tuples.get(0).getDouble("min(price_f)").equals(100D));
+    assertTrue(tuples.get(0).getDouble("avg(price_f)").equals(100D));
+    assertTrue(tuples.get(0).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(0).getDouble("per(price_f,50)").equals(100D));
 
     assertTrue(tuples.get(1).get("test_dt").equals("2014"));
     assertTrue(tuples.get(1).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(1).getDouble("sum(price_f)").equals(25000D));
     assertTrue(tuples.get(1).getDouble("max(price_f)").equals(500D));
     assertTrue(tuples.get(1).getDouble("min(price_f)").equals(500D));
+    assertTrue(tuples.get(1).getDouble("avg(price_f)").equals(500D));
+    assertTrue(tuples.get(1).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(1).getDouble("per(price_f,50)").equals(500D));
+
 
     assertTrue(tuples.get(2).get("test_dt").equals("2015"));
     assertTrue(tuples.get(2).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(2).getDouble("sum(price_f)").equals(15000D));
     assertTrue(tuples.get(2).getDouble("max(price_f)").equals(300D));
     assertTrue(tuples.get(2).getDouble("min(price_f)").equals(300D));
+    assertTrue(tuples.get(2).getDouble("avg(price_f)").equals(300D));
+    assertTrue(tuples.get(2).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(2).getDouble("per(price_f,50)").equals(300D));
 
     assertTrue(tuples.get(3).get("test_dt").equals("2016"));
     assertTrue(tuples.get(3).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(3).getDouble("sum(price_f)").equals(20000D));
     assertTrue(tuples.get(3).getDouble("max(price_f)").equals(400D));
     assertTrue(tuples.get(3).getDouble("min(price_f)").equals(400D));
+    assertTrue(tuples.get(3).getDouble("avg(price_f)").equals(400D));
+    assertTrue(tuples.get(3).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(3).getDouble("per(price_f,50)").equals(400D));
+
 
     expr = "timeseries("+COLLECTIONORALIAS+", q=\"*:*\", start=\"2013-01-01T01:00:00.000Z\", " +
         "end=\"2016-12-01T01:00:00.000Z\", " +
         "gap=\"+1YEAR\", " +
         "field=\"test_dt\", " +
         "format=\"yyyy-MM\", " +
-        "count(*), sum(price_f), max(price_f), min(price_f))";
+        "count(*), sum(price_f), max(price_f), min(price_f), avg(price_f), std(price_f), per(price_f, 50))";
     paramsLoc = new ModifiableSolrParams();
     paramsLoc.set("expr", expr);
     paramsLoc.set("qt", "/stream");
@@ -2552,24 +2782,36 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(tuples.get(0).getDouble("sum(price_f)").equals(10000D));
     assertTrue(tuples.get(0).getDouble("max(price_f)").equals(100D));
     assertTrue(tuples.get(0).getDouble("min(price_f)").equals(100D));
+    assertTrue(tuples.get(0).getDouble("avg(price_f)").equals(100D));
+    assertTrue(tuples.get(0).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(0).getDouble("per(price_f,50)").equals(100D));
 
     assertTrue(tuples.get(1).get("test_dt").equals("2014-01"));
     assertTrue(tuples.get(1).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(1).getDouble("sum(price_f)").equals(25000D));
     assertTrue(tuples.get(1).getDouble("max(price_f)").equals(500D));
     assertTrue(tuples.get(1).getDouble("min(price_f)").equals(500D));
+    assertTrue(tuples.get(1).getDouble("avg(price_f)").equals(500D));
+    assertTrue(tuples.get(1).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(1).getDouble("per(price_f,50)").equals(500D));
 
     assertTrue(tuples.get(2).get("test_dt").equals("2015-01"));
     assertTrue(tuples.get(2).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(2).getDouble("sum(price_f)").equals(15000D));
     assertTrue(tuples.get(2).getDouble("max(price_f)").equals(300D));
     assertTrue(tuples.get(2).getDouble("min(price_f)").equals(300D));
+    assertTrue(tuples.get(2).getDouble("avg(price_f)").equals(300D));
+    assertTrue(tuples.get(2).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(2).getDouble("per(price_f,50)").equals(300D));
 
     assertTrue(tuples.get(3).get("test_dt").equals("2016-01"));
     assertTrue(tuples.get(3).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(3).getDouble("sum(price_f)").equals(20000D));
     assertTrue(tuples.get(3).getDouble("max(price_f)").equals(400D));
     assertTrue(tuples.get(3).getDouble("min(price_f)").equals(400D));
+    assertTrue(tuples.get(3).getDouble("avg(price_f)").equals(400D));
+    assertTrue(tuples.get(3).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(3).getDouble("per(price_f,50)").equals(400D));
 
 
     expr = "timeseries("+COLLECTIONORALIAS+", q=\"*:*\", start=\"2012-01-01T01:00:00.000Z\", " +
@@ -2577,7 +2819,7 @@ public class StreamExpressionTest extends SolrCloudTestCase {
         "gap=\"+1YEAR\", " +
         "field=\"test_dt\", " +
         "format=\"yyyy-MM\", " +
-        "count(*), sum(price_f), max(price_f), min(price_f))";
+        "count(*), sum(price_f), max(price_f), min(price_f), avg(price_f), std(price_f), per(price_f, 50))";
     paramsLoc = new ModifiableSolrParams();
     paramsLoc.set("expr", expr);
     paramsLoc.set("qt", "/stream");
@@ -2592,30 +2834,45 @@ public class StreamExpressionTest extends SolrCloudTestCase {
     assertTrue(tuples.get(0).getDouble("sum(price_f)") == 0);
     assertTrue(tuples.get(0).getDouble("max(price_f)") == 0);
     assertTrue(tuples.get(0).getDouble("min(price_f)") == 0);
+    assertTrue(tuples.get(0).getDouble("avg(price_f)").equals(0D));
+    assertTrue(tuples.get(0).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(0).getDouble("per(price_f,50)").equals(0D));
 
     assertTrue(tuples.get(1).get("test_dt").equals("2013-01"));
     assertTrue(tuples.get(1).getLong("count(*)").equals(100L));
     assertTrue(tuples.get(1).getDouble("sum(price_f)").equals(10000D));
     assertTrue(tuples.get(1).getDouble("max(price_f)").equals(100D));
     assertTrue(tuples.get(1).getDouble("min(price_f)").equals(100D));
+    assertTrue(tuples.get(1).getDouble("avg(price_f)").equals(100D));
+    assertTrue(tuples.get(1).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(1).getDouble("per(price_f,50)").equals(100D));
 
     assertTrue(tuples.get(2).get("test_dt").equals("2014-01"));
     assertTrue(tuples.get(2).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(2).getDouble("sum(price_f)").equals(25000D));
     assertTrue(tuples.get(2).getDouble("max(price_f)").equals(500D));
     assertTrue(tuples.get(2).getDouble("min(price_f)").equals(500D));
+    assertTrue(tuples.get(2).getDouble("avg(price_f)").equals(500D));
+    assertTrue(tuples.get(2).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(2).getDouble("per(price_f,50)").equals(500D));
 
     assertTrue(tuples.get(3).get("test_dt").equals("2015-01"));
     assertTrue(tuples.get(3).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(3).getDouble("sum(price_f)").equals(15000D));
     assertTrue(tuples.get(3).getDouble("max(price_f)").equals(300D));
     assertTrue(tuples.get(3).getDouble("min(price_f)").equals(300D));
+    assertTrue(tuples.get(3).getDouble("avg(price_f)").equals(300D));
+    assertTrue(tuples.get(3).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(3).getDouble("per(price_f,50)").equals(300D));
 
     assertTrue(tuples.get(4).get("test_dt").equals("2016-01"));
     assertTrue(tuples.get(4).getLong("count(*)").equals(50L));
     assertTrue(tuples.get(4).getDouble("sum(price_f)").equals(20000D));
     assertTrue(tuples.get(4).getDouble("max(price_f)").equals(400D));
     assertTrue(tuples.get(4).getDouble("min(price_f)").equals(400D));
+    assertTrue(tuples.get(4).getDouble("avg(price_f)").equals(400D));
+    assertTrue(tuples.get(4).getDouble("std(price_f)").equals(0D));
+    assertTrue(tuples.get(4).getDouble("per(price_f,50)").equals(400D));
   }
 
   @Test


[lucene-solr] 07/47: SOLR-14486: Autoscaling simulation framework should stop using /clusterstate.json.

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 51c8e076f0e612acd6645873ca176d4dbb572f4e
Author: Andrzej Bialecki <ab...@apache.org>
AuthorDate: Tue May 19 18:52:47 2020 +0200

    SOLR-14486: Autoscaling simulation framework should stop using /clusterstate.json.
---
 solr/CHANGES.txt                                   |   3 +
 .../cloud/autoscaling/sim/SimCloudManager.java     |   1 -
 .../autoscaling/sim/SimClusterStateProvider.java   | 279 ++++++++++++---------
 .../sim/SnapshotClusterStateProvider.java          |  32 ++-
 .../autoscaling/sim/TestSnapshotCloudManager.java  |  10 +-
 5 files changed, 207 insertions(+), 118 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 2f878d0..7105d5c 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -71,6 +71,9 @@ Other Changes
   If you have security concerns or other reasons to disable the Admin UI, you can modify `SOLR_ADMIN_UI_DISABLED`
  in `solr.in.sh`/`solr.in.cmd` at start. (marcussorealheis)
 
+* SOLR-14486: Autoscaling simulation framework no longer creates /clusterstate.json (format 1);
+  instead it creates individual per-collection /state.json files (format 2). (ab)
+
 ==================  8.6.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
index 9b9352b..aa2d7d0 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
@@ -181,7 +181,6 @@ public class SimCloudManager implements SolrCloudManager {
     if (distribStateManager == null) {
       this.stateManager =  new SimDistribStateManager(SimDistribStateManager.createNewRootNode());
       // init common paths
-      stateManager.makePath(ZkStateReader.CLUSTER_STATE);
       stateManager.makePath(ZkStateReader.CLUSTER_PROPS);
       stateManager.makePath(ZkStateReader.SOLR_AUTOSCALING_CONF_PATH);
       stateManager.makePath(ZkStateReader.LIVE_NODES_ZKNODE);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
index df14c76..b76f9b5 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
@@ -48,7 +48,9 @@ import java.util.stream.Collectors;
 import com.google.common.util.concurrent.AtomicDouble;
 import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
+import org.apache.solr.client.solrj.cloud.autoscaling.AlreadyExistsException;
 import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
+import org.apache.solr.client.solrj.cloud.autoscaling.BadVersionException;
 import org.apache.solr.client.solrj.cloud.autoscaling.Policy;
 import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
@@ -98,6 +100,7 @@ import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.update.SolrIndexSplitter;
 import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -119,8 +122,8 @@ import static org.apache.solr.common.params.CommonParams.NAME;
  *     <li>using autoscaling policy for replica placements</li>
  *     <li>maintaining an up-to-date list of /live_nodes and nodeAdded / nodeLost markers</li>
  *     <li>running a simulated leader election on collection changes (with throttling), when needed</li>
- *     <li>maintaining an up-to-date /clusterstate.json (single file format), which also tracks replica states,
- *     leader election changes, replica property changes, etc. Note: this file is only written,
+ *     <li>maintaining up-to-date per-collection /state.json files, which also track replica states,
+ *     leader election changes, replica property changes, etc. Note: these files are only written,
  *     but never read by the framework!</li>
  *     <li>maintaining an up-to-date /clusterprops.json. Note: this file is only written, but never read by the
  *     framework!</li>
@@ -153,12 +156,131 @@ public class SimClusterStateProvider implements ClusterStateProvider {
   private final Map<String, Map<String, Long>> opDelays = new ConcurrentHashMap<>();
 
 
-  private volatile int clusterStateVersion = 0;
   private volatile String overseerLeader = null;
 
   private volatile Map<String, Object> lastSavedProperties = null;
 
-  private final AtomicReference<Map<String, DocCollection>> collectionsStatesRef = new AtomicReference<>();
+  private class CachedCollectionRef {
+    private final String name;
+    private int zkVersion;
+    private DocCollection coll;
+    ReentrantLock lock = new ReentrantLock();
+
+    CachedCollectionRef(String name, int zkVersion) {
+      this.name = name;
+      this.zkVersion = zkVersion;
+    }
+
+    public DocCollection getColl() throws InterruptedException, IOException {
+      DocCollection dc = coll;
+      if (dc != null) {
+        return dc;
+      }
+      lock.lock();
+      try {
+        if (coll != null) {
+          return coll;
+        } else {
+          Map<String, Map<String, Map<String, Replica>>> collMap = new HashMap<>();
+          nodeReplicaMap.forEach((n, replicas) -> {
+            synchronized (replicas) {
+              replicas.forEach(ri -> {
+                if (!ri.getCollection().equals(name)) {
+                  return;
+                }
+                Map<String, Object> props;
+                synchronized (ri) {
+                  props = new HashMap<>(ri.getVariables());
+                }
+                props.put(ZkStateReader.NODE_NAME_PROP, n);
+                props.put(ZkStateReader.CORE_NAME_PROP, ri.getCore());
+                props.put(ZkStateReader.REPLICA_TYPE, ri.getType().toString());
+                props.put(ZkStateReader.STATE_PROP, ri.getState().toString());
+                Replica r = new Replica(ri.getName(), props, ri.getCollection(), ri.getShard());
+                collMap.computeIfAbsent(ri.getCollection(), c -> new HashMap<>())
+                    .computeIfAbsent(ri.getShard(), s -> new HashMap<>())
+                    .put(ri.getName(), r);
+              });
+            }
+          });
+
+          // add empty slices
+          sliceProperties.forEach((c, perSliceProps) -> {
+            if (!c.equals(name)) {
+              return;
+            }
+            perSliceProps.forEach((slice, props) -> {
+              collMap.computeIfAbsent(c, co -> new ConcurrentHashMap<>()).computeIfAbsent(slice, s -> new ConcurrentHashMap<>());
+            });
+          });
+          // add empty collections
+          collProperties.keySet().forEach(c -> {
+            if (!c.equals(name)) {
+              return;
+            }
+            collMap.computeIfAbsent(c, co -> new ConcurrentHashMap<>());
+          });
+
+          Map<String, Map<String, Replica>> shards = collMap.get(name);
+          Map<String, Slice> slices = new HashMap<>();
+          shards.forEach((s, replicas) -> {
+            Map<String, Object> sliceProps = sliceProperties.computeIfAbsent(name, c -> new ConcurrentHashMap<>()).computeIfAbsent(s, sl -> new ConcurrentHashMap<>());
+            Slice slice = new Slice(s, replicas, sliceProps, name);
+            slices.put(s, slice);
+          });
+          Map<String, Object> collProps = collProperties.computeIfAbsent(name, c -> new ConcurrentHashMap<>());
+          Map<String, Object> routerProp = (Map<String, Object>) collProps.getOrDefault(DocCollection.DOC_ROUTER, Collections.singletonMap("name", DocRouter.DEFAULT_NAME));
+          DocRouter router = DocRouter.getDocRouter((String)routerProp.getOrDefault("name", DocRouter.DEFAULT_NAME));
+          String path = ZkStateReader.getCollectionPath(name);
+          coll = new DocCollection(name, slices, collProps, router, zkVersion + 1, path);
+          try {
+            SimDistribStateManager stateManager = cloudManager.getSimDistribStateManager();
+            byte[] data = Utils.toJSON(Collections.singletonMap(name, coll));
+            if (!stateManager.hasData(path)) {
+              try {
+                stateManager.makePath(path, data, CreateMode.PERSISTENT, true);
+              } catch (AlreadyExistsException e) {
+                // try updating
+                stateManager.setData(path, data, zkVersion);
+              }
+            } else {
+              stateManager.setData(path, data, zkVersion);
+            }
+            // verify version
+            VersionedData vd = stateManager.getData(path);
+            assert vd.getVersion() == zkVersion + 1;
+            zkVersion++;
+          } catch (KeeperException | BadVersionException e) {
+            // should never happen?
+            throw new RuntimeException("error saving " + coll, e);
+          }
+        }
+      } finally {
+        lock.unlock();
+      }
+      return coll;
+    }
+
+    public int getZkVersion() {
+      lock.lock();
+      try {
+        return zkVersion;
+      } finally {
+        lock.unlock();
+      }
+    }
+
+    public void invalidate() {
+      lock.lock();
+      try {
+        coll = null;
+      } finally {
+        lock.unlock();
+      }
+    }
+  }
+
+  private final Map<String, CachedCollectionRef> collectionsStatesRef = new ConcurrentHashMap<>();
 
   private final Random bulkUpdateRandom = new Random(0);
 
@@ -207,6 +329,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       sliceProperties.clear();
       nodeReplicaMap.clear();
       liveNodes.clear();
+      collectionsStatesRef.clear();
       for (String nodeId : stateManager.listData(ZkStateReader.LIVE_NODES_ZKNODE)) {
         if (stateManager.hasData(ZkStateReader.LIVE_NODES_ZKNODE + "/" + nodeId)) {
           stateManager.removeData(ZkStateReader.LIVE_NODES_ZKNODE + "/" + nodeId, -1);
@@ -223,6 +346,8 @@ public class SimClusterStateProvider implements ClusterStateProvider {
         createEphemeralLiveNode(nodeId);
       }
       initialState.forEachCollection(dc -> {
+        // DocCollection will be created later
+        collectionsStatesRef.put(dc.getName(), new CachedCollectionRef(dc.getName(), dc.getZNodeVersion()));
         collProperties.computeIfAbsent(dc.getName(), name -> new ConcurrentHashMap<>()).putAll(dc.getProperties());
         opDelays.computeIfAbsent(dc.getName(), Utils.NEW_HASHMAP_FUN).putAll(defaultOpDelays);
         dc.getSlices().forEach(s -> {
@@ -248,7 +373,6 @@ public class SimClusterStateProvider implements ClusterStateProvider {
           });
         });
       });
-      collectionsStatesRef.set(null);
     } finally {
       lock.unlock();
     }
@@ -287,8 +411,6 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     return nodes.get(random.nextInt(nodes.size()));
   }
 
-  // todo: maybe hook up DistribStateManager /clusterstate.json watchers?
-
   private ReplicaInfo getReplicaInfo(Replica r) {
     final List<ReplicaInfo> list = nodeReplicaMap.computeIfAbsent
       (r.getNodeName(), Utils.NEW_SYNCHRONIZED_ARRAYLIST_FUN);
@@ -331,8 +453,8 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       // mark every replica on that node as down
       boolean res = liveNodes.remove(nodeId);
       setReplicaStates(nodeId, Replica.State.DOWN, collections);
-      if (!collections.isEmpty()) {
-        collectionsStatesRef.set(null);
+      for (String collection : collections) {
+        collectionsStatesRef.get(collection).invalidate();
       }
       // remove ephemeral nodes
       stateManager.getRoot().removeEphemeralChildren(nodeId);
@@ -363,7 +485,6 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     try {
       Set<String> myNodes = new HashSet<>(nodeReplicaMap.keySet());
       myNodes.removeAll(liveNodes.get());
-      collectionsStatesRef.set(null);
     } finally {
       lock.unlock();
     }
@@ -452,7 +573,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     try {
       setReplicaStates(nodeId, Replica.State.ACTIVE, collections);
       if (!collections.isEmpty()) {
-        collectionsStatesRef.set(null);
+        collections.forEach(c -> collectionsStatesRef.get(c).invalidate());
         simRunLeaderElection(collections, true);
         return true;
       } else {
@@ -604,7 +725,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       cloudManager.getMetricManager().registerGauge(null, registry,
           () -> replicaSize, "", true, Type.CORE_IDX.metricsAttribute);
       // at this point nuke our cached DocCollection state
-      collectionsStatesRef.set(null);
+      collectionsStatesRef.get(replicaInfo.getCollection()).invalidate();
       log.trace("-- simAddReplica {}", replicaInfo);
       if (runLeaderElection) {
         simRunLeaderElection(replicaInfo.getCollection(), replicaInfo.getShard(), true);
@@ -633,7 +754,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             colShardReplicaMap.computeIfAbsent(ri.getCollection(), c -> new ConcurrentHashMap<>())
               .computeIfAbsent(ri.getShard(), s -> new ArrayList<>())
               .remove(ri);
-            collectionsStatesRef.set(null);
+            collectionsStatesRef.get(ri.getCollection()).invalidate();
 
             opDelay(ri.getCollection(), CollectionParams.CollectionAction.DELETEREPLICA.name());
 
@@ -669,26 +790,6 @@ public class SimClusterStateProvider implements ClusterStateProvider {
   }
 
   /**
-   * Save clusterstate.json to {@link DistribStateManager}.
-   * @return saved state
-   */
-  private ClusterState saveClusterState(ClusterState state) throws IOException {
-    ensureNotClosed();
-    byte[] data = Utils.toJSON(state);
-    try {
-      VersionedData oldData = stateManager.getData(ZkStateReader.CLUSTER_STATE);
-      int version = oldData != null ? oldData.getVersion() : 0;
-      assert clusterStateVersion == version : "local clusterStateVersion out of sync";
-      stateManager.setData(ZkStateReader.CLUSTER_STATE, data, version);
-      log.debug("** saved cluster state version {}", version);
-      clusterStateVersion++;
-    } catch (Exception e) {
-      throw new IOException(e);
-    }
-    return state;
-  }
-
-  /**
    * Delay an operation by a configured amount.
    * @param collection collection name
    * @param op operation name.
@@ -725,7 +826,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     if (saveClusterState) {
       lock.lockInterruptibly();
       try {
-        collectionsStatesRef.set(null);
+        collections.forEach(c -> collectionsStatesRef.get(c).invalidate());
       } finally {
         lock.unlock();
       }
@@ -865,13 +966,13 @@ public class SimClusterStateProvider implements ClusterStateProvider {
         }
         if (log.isDebugEnabled()) {
           log.debug("-- elected new leader for {} / {} (currentVersion={}): {}", collection,
-              s.getName(), clusterStateVersion, ri);
+              s.getName(), col.getZNodeVersion(), ri);
         }
         stateChanged.set(true);
       }
     } finally {
       if (stateChanged.get() || saveState) {
-        collectionsStatesRef.set(null);
+        collectionsStatesRef.get(collection).invalidate();
       }
       lock.unlock();
     }
@@ -889,7 +990,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     }
     boolean waitForFinalState = props.getBool(CommonAdminParams.WAIT_FOR_FINAL_STATE, false);
     final String collectionName = props.getStr(NAME);
-    log.debug("-- simCreateCollection {}, currentVersion={}", collectionName, clusterStateVersion);
+    log.debug("-- simCreateCollection {}", collectionName);
 
     String router = props.getStr("router.name", DocRouter.DEFAULT_NAME);
     String policy = props.getStr(Policy.POLICY);
@@ -903,12 +1004,6 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     CreateCollectionCmd.checkReplicaTypes(props);
 
     // always force getting fresh state
-    lock.lockInterruptibly();
-    try {
-      collectionsStatesRef.set(null);
-    } finally {
-      lock.unlock();
-    }
     final ClusterState clusterState = getClusterState();
 
     String withCollection = props.getStr(CollectionAdminParams.WITH_COLLECTION);
@@ -962,8 +1057,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             CollectionAdminParams.COLOCATED_WITH, collectionName);
         cmd = new CollectionMutator(cloudManager).modifyCollection(clusterState,message);
       }
-      // force recreation of collection states
-      collectionsStatesRef.set(null);
+      collectionsStatesRef.put(collectionName, new CachedCollectionRef(collectionName, 0));
 
     } finally {
       lock.unlock();
@@ -1043,7 +1137,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     // force recreation of collection states
     lock.lockInterruptibly();
     try {
-      collectionsStatesRef.set(null);
+      collectionsStatesRef.get(collectionName).invalidate();
     } finally {
       lock.unlock();
     }
@@ -1057,7 +1151,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       }
     }
     results.add("success", "");
-    log.debug("-- finished createCollection {}, currentVersion={}", collectionName, clusterStateVersion);
+    log.debug("-- finished createCollection {}", collectionName);
   }
 
   /**
@@ -1106,7 +1200,8 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             }
           }
         });
-      collectionsStatesRef.set(null);
+      cloudManager.getDistribStateManager().removeRecursively(ZkStateReader.getCollectionPath(collection), true, true);
+      collectionsStatesRef.remove(collection);
       results.add("success", "");
     } catch (Exception e) {
       log.warn("Exception", e);
@@ -1121,7 +1216,13 @@ public class SimClusterStateProvider implements ClusterStateProvider {
   public void simDeleteAllCollections() throws Exception {
     lock.lockInterruptibly();
     try {
-      collectionsStatesRef.set(null);
+      collectionsStatesRef.keySet().forEach(name -> {
+        try {
+          cloudManager.getDistribStateManager().removeRecursively(ZkStateReader.getCollectionPath(name), true, true);
+        } catch (Exception e) {
+          log.error("Unable to delete state.json for collection {}", name, e);
+        }
+      });
       
       collProperties.clear();
       sliceProperties.clear();
@@ -1468,7 +1569,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       }
 
       // invalidate cached state
-      collectionsStatesRef.set(null);
+      collectionsStatesRef.get(collectionName).invalidate();
     } finally {
       SplitShardCmd.unlockForSplit(cloudManager, collectionName, sliceName.get());
       lock.unlock();
@@ -1516,7 +1617,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             }
           }
         });
-      collectionsStatesRef.set(null);
+      collectionsStatesRef.get(collectionName).invalidate();
       results.add("success", "");
     } catch (Exception e) {
       results.add("failure", e.toString());
@@ -2004,7 +2105,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
         props.clear();
         props.putAll(properties);
       }
-      collectionsStatesRef.set(null);
+      collectionsStatesRef.get(coll).invalidate();
     } finally {
       lock.unlock();
     }
@@ -2025,7 +2126,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       } else {
         props.put(key, value);
       }
-      collectionsStatesRef.set(null);
+      collectionsStatesRef.get(coll).invalidate();
     } finally {
       lock.unlock();
     }
@@ -2046,7 +2147,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       if (properties != null) {
         sliceProps.putAll(properties);
       }
-      collectionsStatesRef.set(null);
+      collectionsStatesRef.get(coll).invalidate();
     } finally {
       lock.unlock();
     }
@@ -2247,7 +2348,6 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     lock.lockInterruptibly();
     try {
       final Map<String, Map<String, Object>> stats = new TreeMap<>();
-      collectionsStatesRef.set(null);
       ClusterState state = getClusterState();
       state.forEachCollection(coll -> {
         Map<String, Object> perColl = new LinkedHashMap<>();
@@ -2286,7 +2386,9 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             }
             continue;
           }
-          AtomicLong buffered = (AtomicLong)sliceProperties.get(coll.getName()).get(s.getName()).get(BUFFERED_UPDATES);
+          AtomicLong buffered = (AtomicLong)sliceProperties
+              .getOrDefault(coll.getName(), Collections.emptyMap())
+              .getOrDefault(s.getName(), Collections.emptyMap()).get(BUFFERED_UPDATES);
           if (buffered != null) {
             bufferedDocs += buffered.get();
           }
@@ -2389,7 +2491,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       lock.lockInterruptibly();
       try {
         Map<String, DocCollection> states = getCollectionStates();
-        ClusterState state = new ClusterState(clusterStateVersion, liveNodes.get(), states);
+        ClusterState state = new ClusterState(0, liveNodes.get(), states);
         return state;
       } finally {
         lock.unlock();
@@ -2399,65 +2501,18 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     }
   }
 
-  // this method uses a simple cache in collectionsStatesRef. Operations that modify
-  // cluster state should always reset this cache so that the changes become visible
   private Map<String, DocCollection> getCollectionStates() throws IOException, InterruptedException {
     lock.lockInterruptibly();
     try {
-      Map<String, DocCollection> collectionStates = collectionsStatesRef.get();
-      if (collectionStates != null) {
-        return collectionStates;
-      }
-      collectionsStatesRef.set(null);
-      log.debug("** creating new collection states, currentVersion={}", clusterStateVersion);
-      Map<String, Map<String, Map<String, Replica>>> collMap = new HashMap<>();
-      nodeReplicaMap.forEach((n, replicas) -> {
-          synchronized (replicas) {
-            replicas.forEach(ri -> {
-                Map<String, Object> props;
-                synchronized (ri) {
-                  props = new HashMap<>(ri.getVariables());
-                }
-                props.put(ZkStateReader.NODE_NAME_PROP, n);
-                props.put(ZkStateReader.CORE_NAME_PROP, ri.getCore());
-                props.put(ZkStateReader.REPLICA_TYPE, ri.getType().toString());
-                props.put(ZkStateReader.STATE_PROP, ri.getState().toString());
-                Replica r = new Replica(ri.getName(), props, ri.getCollection(), ri.getShard());
-                collMap.computeIfAbsent(ri.getCollection(), c -> new HashMap<>())
-                  .computeIfAbsent(ri.getShard(), s -> new HashMap<>())
-                  .put(ri.getName(), r);
-              });
-          }
-        });
-
-      // add empty slices
-      sliceProperties.forEach((c, perSliceProps) -> {
-        perSliceProps.forEach((slice, props) -> {
-          collMap.computeIfAbsent(c, co -> new ConcurrentHashMap<>()).computeIfAbsent(slice, s -> new ConcurrentHashMap<>());
-        });
-      });
-      // add empty collections
-      collProperties.keySet().forEach(c -> {
-        collMap.computeIfAbsent(c, co -> new ConcurrentHashMap<>());
-      });
-
-      Map<String, DocCollection> res = new HashMap<>();
-      collMap.forEach((coll, shards) -> {
-        Map<String, Slice> slices = new HashMap<>();
-        shards.forEach((s, replicas) -> {
-          Map<String, Object> sliceProps = sliceProperties.computeIfAbsent(coll, c -> new ConcurrentHashMap<>()).computeIfAbsent(s, sl -> new ConcurrentHashMap<>());
-          Slice slice = new Slice(s, replicas, sliceProps, coll);
-          slices.put(s, slice);
-        });
-        Map<String, Object> collProps = collProperties.computeIfAbsent(coll, c -> new ConcurrentHashMap<>());
-        Map<String, Object> routerProp = (Map<String, Object>) collProps.getOrDefault(DocCollection.DOC_ROUTER, Collections.singletonMap("name", DocRouter.DEFAULT_NAME));
-        DocRouter router = DocRouter.getDocRouter((String)routerProp.getOrDefault("name", DocRouter.DEFAULT_NAME));
-        DocCollection dc = new DocCollection(coll, slices, collProps, router, clusterStateVersion, ZkStateReader.CLUSTER_STATE);
-        res.put(coll, dc);
+      Map<String, DocCollection> collectionStates = new HashMap<>();
+      collectionsStatesRef.forEach((name, cached) -> {
+        try {
+          collectionStates.put(name, cached.getColl());
+        } catch (Exception e) {
+          throw new RuntimeException("error building collection " + name + " state", e);
+        }
       });
-      saveClusterState(new ClusterState(clusterStateVersion, liveNodes.get(), res));
-      collectionsStatesRef.set(res);
-      return res;
+      return collectionStates;
     } finally {
       lock.unlock();
     }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java
index 3655fb3..351265d 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java
@@ -21,6 +21,7 @@ import java.io.UnsupportedEncodingException;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -55,8 +56,27 @@ public class SnapshotClusterStateProvider implements ClusterStateProvider {
     liveNodes = Set.copyOf((Collection<String>)snapshot.getOrDefault("liveNodes", Collections.emptySet()));
     clusterProperties = (Map<String, Object>)snapshot.getOrDefault("clusterProperties", Collections.emptyMap());
     Map<String, Object> stateMap = new HashMap<>((Map<String, Object>)snapshot.getOrDefault("clusterState", Collections.emptyMap()));
-    Number version = (Number)stateMap.remove("version");
-    clusterState = ClusterState.load(version != null ? version.intValue() : null, stateMap, liveNodes, ZkStateReader.CLUSTER_STATE);
+    Map<String, DocCollection> collectionStates = new HashMap<>();
+    // back-compat with format = 1
+    Integer stateVersion = Integer.valueOf(String.valueOf(stateMap.getOrDefault("version", 0)));
+    stateMap.remove("version");
+    stateMap.forEach((name, state) -> {
+      Map<String, Object> mutableState = (Map<String, Object>)state;
+      Map<String, Object> collMap = (Map<String, Object>) mutableState.get(name);
+      if (collMap == null) {
+        // snapshot in format 1
+        collMap = mutableState;
+        mutableState = Collections.singletonMap(name, state);
+      }
+      Integer version = Integer.parseInt(String.valueOf(collMap.getOrDefault("zNodeVersion", stateVersion)));
+      String path = String.valueOf(collMap.getOrDefault("zNode", ZkStateReader.getCollectionPath(name)));
+      collMap.remove("zNodeVersion");
+      collMap.remove("zNode");
+      byte[] data = Utils.toJSON(mutableState);
+      ClusterState collState = ClusterState.load(version, data, Collections.emptySet(), path);
+      collectionStates.put(name, collState.getCollection(name));
+    });
+    clusterState = new ClusterState(stateVersion, liveNodes, collectionStates);
   }
 
   public Map<String, Object> getSnapshot() {
@@ -67,14 +87,18 @@ public class SnapshotClusterStateProvider implements ClusterStateProvider {
     }
     Map<String, Object> stateMap = new HashMap<>();
     snapshot.put("clusterState", stateMap);
-    stateMap.put("version", clusterState.getZNodeVersion());
     clusterState.forEachCollection(coll -> {
       CharArr out = new CharArr();
       JSONWriter writer = new JSONWriter(out, 2);
       coll.write(writer);
       String json = out.toString();
       try {
-        stateMap.put(coll.getName(), Utils.fromJSON(json.getBytes("UTF-8")));
+        Map<String, Object> collMap = new LinkedHashMap<>((Map<String, Object>)Utils.fromJSON(json.getBytes("UTF-8")));
+        collMap.put("zNodeVersion", coll.getZNodeVersion());
+        collMap.put("zNode", coll.getZNode());
+        // format compatible with the real /state.json, which uses a mini-ClusterState
+        // consisting of a single collection
+        stateMap.put(coll.getName(), Collections.singletonMap(coll.getName(), collMap));
       } catch (UnsupportedEncodingException e) {
         throw new RuntimeException("should not happen!", e);
       }
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java
index 6e7f0ea..876c750 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java
@@ -229,8 +229,8 @@ public class TestSnapshotCloudManager extends SolrCloudTestCase {
       Pattern.compile("/autoscaling/triggerState/.*"),
       // some triggers may have run after the snapshot was taken
       Pattern.compile("/autoscaling/events/.*"),
-      // we always use format 1 in SimClusterStateProvider
       Pattern.compile("/clusterstate\\.json"),
+      Pattern.compile("/collections/[^/]+?/state.json"),
       // depending on the startup sequence leaders may differ
       Pattern.compile("/collections/[^/]+?/leader_elect/.*"),
       Pattern.compile("/collections/[^/]+?/leaders/.*"),
@@ -255,6 +255,14 @@ public class TestSnapshotCloudManager extends SolrCloudTestCase {
         .filter(STATE_FILTER_FUN).collect(Collectors.toList()));
     Collections.sort(treeOne);
     Collections.sort(treeTwo);
+    if (!treeOne.equals(treeTwo)) {
+      List<String> t1 = new ArrayList<>(treeOne);
+      t1.removeAll(treeTwo);
+      log.warn("Only in tree one: " + t1);
+      List<String> t2 = new ArrayList<>(treeTwo);
+      t2.removeAll(treeOne);
+      log.warn("Only in tree two: " + t2);
+    }
     assertEquals(treeOne, treeTwo);
     for (String path : treeOne) {
       VersionedData vd1 = one.getData(path);


[lucene-solr] 45/47: SOLR-14517 Obey "mm" local param on edismax queries with operators (#1540)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit cacdc8638bfd935855cc00e40e6f94d98e932319
Author: Yuriy Koval <yu...@users.noreply.github.com>
AuthorDate: Mon Jun 1 05:13:11 2020 -0700

    SOLR-14517 Obey "mm" local param on edismax queries with operators (#1540)
    
    Prior to this commit query parsing looked for mm in query-params, but neglected to check local params for a subset of queries.
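
For illustration (field name and terms borrowed from the test changes below; the snippet itself is not part of the patch), the two ways of supplying mm that this fix makes behave consistently:

    mm as a request parameter - honored with or without boolean operators, before and after this change:
        defType=edismax & qf=text_sw & mm=50% & q=stocks AND oil gold
    mm as a local param - previously reset to 0% as soon as an explicit operator appeared, now honored:
        q={!edismax qf=text_sw mm=50% v='stocks AND oil gold'}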
---
 solr/CHANGES.txt                                   |  3 ++
 .../apache/solr/search/ExtendedDismaxQParser.java  |  2 +-
 .../solr/search/TestExtendedDismaxParser.java      | 41 ++++++++++++++++++----
 3 files changed, 39 insertions(+), 7 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 1d13f76..6040f64 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -200,8 +200,11 @@ Bug Fixes
 
 * SOLR-14498: Upgrade to Caffeine 2.8.4, which fixes the cache poisoning issue. (Jakub Zytka, ab)
 
+* SOLR-14517: Don't ignore 'mm' localparam on edismax queries using operators (Yuriy Koval via Jason Gerlowski)
+
 * SOLR-14491: Intercepting internode requests in KerberosPlugin when HTTP/2 client is used (Ishan Chattopadhyaya, Moshe Bla)
 
+
 Other Changes
 ---------------------
 * SOLR-14197: SolrResourceLoader: marked many methods as deprecated, and in some cases rerouted exiting logic to avoid
diff --git a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
index 93aaf28..9c8672c 100644
--- a/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/ExtendedDismaxQParser.java
@@ -412,7 +412,7 @@ public class ExtendedDismaxQParser extends QParser {
       String mmSpec = config.minShouldMatch;
 
       if (foundOperators(clauses, config.lowercaseOperators)) {
-        mmSpec = params.get(DisMaxParams.MM, "0%"); // Use provided mm spec if present, otherwise turn off mm processing
+        mmSpec = config.solrParams.get(DisMaxParams.MM, "0%"); // Use provided mm spec if present, otherwise turn off mm processing
       }
       query = SolrPluginUtils.setMinShouldMatch((BooleanQuery)query, mmSpec, config.mmAutoRelax);
     }
diff --git a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
index 65e1850..e16df5e 100644
--- a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
@@ -1220,12 +1220,8 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
               "defType", "edismax")
           , "*[count(//doc)=4]");
 
-      assertQ("test minShouldMatch (top level optional terms only and sow=false)",
-          req("q", "stocks oil gold", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold))~1)
-              "qf", "text_sw",
-              "mm", "50%",
-              "sow", sow,
-              "defType", "edismax")
+      assertQ("test minShouldMatch (top level optional terms only) local mm=50%",
+          req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks oil gold'}")
           , "*[count(//doc)=4]");
 
       assertQ("test minShouldMatch (top level optional and negative terms mm=50%)",
@@ -1236,6 +1232,10 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
               "defType", "edismax")
           , "*[count(//doc)=3]");
 
+      assertQ("test minShouldMatch (top level optional and negative terms local mm=50%)",
+          req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks oil gold -stockade'}")
+          , "*[count(//doc)=3]");
+
       assertQ("test minShouldMatch (top level optional and negative terms mm=100%)",
           req("q", "stocks gold -stockade", // +(((text_sw:stock) (text_sw:oil) (text_sw:gold) -(text_sw:stockad))~2)
               "qf", "text_sw",
@@ -1244,6 +1244,10 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
               "defType", "edismax")
           , "*[count(//doc)=1]");
 
+      assertQ("test minShouldMatch (top level optional and negative terms local mm=100%)",
+          req("q", "{!edismax qf=text_sw mm=100% sow=" + sow + " v='stocks gold -stockade'}")
+          , "*[count(//doc)=1]");
+
       assertQ("test minShouldMatch (top level required terms only)",
           req("q", "stocks AND oil", // +(+(text_sw:stock) +(text_sw:oil))
               "qf", "text_sw",
@@ -1252,6 +1256,10 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
               "defType", "edismax")
           , "*[count(//doc)=1]");
 
+      assertQ("test minShouldMatch (top level required terms only) local mm=50%)",
+          req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='stocks AND oil'}")
+          , "*[count(//doc)=1]");
+
       assertQ("test minShouldMatch (top level optional and required terms)",
           req("q", "oil gold +stocks", // +(((text_sw:oil) (text_sw:gold) +(text_sw:stock))~1)
               "qf", "text_sw",
@@ -1260,6 +1268,10 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
               "defType", "edismax")
           , "*[count(//doc)=3]");
 
+      assertQ("test minShouldMatch (top level optional and required terms) local mm=50%)",
+          req("q", "{!edismax qf=text_sw mm=50% sow=" + sow + " v='oil gold +stocks'}")
+          , "*[count(//doc)=3]");
+
       assertQ("test minShouldMatch (top level optional with explicit OR and parens)",
           req("q", "(snake OR stocks) oil",
               "qf", "text_sw",
@@ -1268,6 +1280,10 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
               "defType", "edismax")
           , "*[count(//doc)=2]");
 
+      assertQ("test minShouldMatch (top level optional with explicit OR and parens) local mm=100%)",
+          req("q", "{!edismax qf=text_sw mm=100% sow=" + sow + " v='(snake OR stocks) oil'}")
+          , "*[count(//doc)=2]");
+
       // The results for these two appear odd, but are correct as per BooleanQuery processing.
       // See: http://searchhub.org/2011/12/28/why-not-and-or-and-not/
       // Non-parenthesis OR/AND precedence is not true to abstract boolean logic in solr when q.op = AND
@@ -1280,6 +1296,11 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
               "sow", sow,
               "defType", "edismax")
           , "*[count(//doc)=0]");
+
+      assertQ("test minShouldMatch (top level optional with explicit OR without parens) local mm=100%)",
+          req("q", "{!edismax qf=text_sw q.op=OR mm=100% sow=" + sow + " v='snake OR stocks oil'}")
+          , "*[count(//doc)=0]");
+
       assertQ("test minShouldMatch (top level optional with explicit OR without parens)",
           req("q", "snake OR stocks oil",
               "qf", "text_sw",
@@ -1289,6 +1310,10 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
               "defType", "edismax")
           , "*[count(//doc)=0]");
 
+      assertQ("test minShouldMatch (top level optional with explicit OR without parens) local mm=100%)",
+          req("q", "{!edismax qf=text_sw q.op=AND mm=100% sow=" + sow + " v='snake OR stocks oil'}")
+          , "*[count(//doc)=0]");
+
       // SOLR-9174
       assertQ("test minShouldMatch=1<-1 with explicit OR, one impossible clause, and no explicit q.op",
           req("q", "barbie OR (hair AND nonexistentword)",
@@ -1297,6 +1322,10 @@ public class TestExtendedDismaxParser extends SolrTestCaseJ4 {
               "sow", sow,
               "defType", "edismax")
           , "*[count(//doc)=3]");
+
+      assertQ("test local minShouldMatch=1<-1 with explicit OR, one impossible clause, and no explicit q.op",
+          req("q", "{!edismax qf=text_sw mm=1<-1 sow=" + sow + " v='barbie OR (hair AND nonexistentword)'}")
+          , "*[count(//doc)=3]");
     }
   }
 


[lucene-solr] 32/47: SOLR-14498: BlockCache gets stuck not accepting new stores. Fix gradle check

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit de153211abb061b9e864af14fc66d6a4fca6f47d
Author: Erick Erickson <Er...@gmail.com>
AuthorDate: Thu May 28 08:12:30 2020 -0400

    SOLR-14498: BlockCache gets stuck not accepting new stores. Fix gradle check
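
For context, a sketch of the usual dependency-bump workflow (assuming the palantir consistent-versions plugin used by the gradle build; the command below is an assumption and not part of this commit): versions.props holds the hand-edited requested version, and versions.lock is regenerated from it.

    # 1. bump the requested version by hand in versions.props
    com.github.ben-manes.caffeine:caffeine=2.8.4
    # 2. regenerate versions.lock so that "gradlew check" passes
    ./gradlew --write-locks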
---
 versions.lock  | 2 +-
 versions.props | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/versions.lock b/versions.lock
index 7b22ab6..0ef2ed8 100644
--- a/versions.lock
+++ b/versions.lock
@@ -11,7 +11,7 @@ com.fasterxml.jackson.core:jackson-annotations:2.10.1 (2 constraints: 331dcd4e)
 com.fasterxml.jackson.core:jackson-core:2.10.1 (3 constraints: 633586b7)
 com.fasterxml.jackson.core:jackson-databind:2.10.1 (3 constraints: 941aba96)
 com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.10.1 (1 constraints: 3605303b)
-com.github.ben-manes.caffeine:caffeine:2.8.0 (1 constraints: 0c050d36)
+com.github.ben-manes.caffeine:caffeine:2.8.4 (1 constraints: 10051136)
 com.github.jnr:jffi:1.2.18 (1 constraints: b20902ab)
 com.github.jnr:jnr-constants:0.9.12 (4 constraints: ed2c9d5d)
 com.github.jnr:jnr-enxio:0.19 (2 constraints: 2a167d08)
diff --git a/versions.props b/versions.props
index 6a1c142..de08c3c 100644
--- a/versions.props
+++ b/versions.props
@@ -5,7 +5,7 @@ com.cybozu.labs:langdetect=1.1-20120112
 com.drewnoakes:metadata-extractor=2.11.0
 com.epam:parso=2.0.11
 com.fasterxml.jackson*:*=2.10.1
-com.github.ben-manes.caffeine:caffeine=2.8.0
+com.github.ben-manes.caffeine:caffeine=2.8.4
 com.github.virtuald:curvesapi=1.06
 com.github.zafarkhaja:java-semver=0.9.0
 com.google.guava:guava=25.1-jre


[lucene-solr] 36/47: ref_guide - metrics reporting - small typo (#1544)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 2b7d2781cc027adfa400fd74432d9e5cc6633a45
Author: soleuu <so...@gmail.com>
AuthorDate: Fri May 29 14:47:36 2020 +0200

    ref_guide - metrics reporting - small typo (#1544)
---
 solr/solr-ref-guide/src/metrics-reporting.adoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/solr/solr-ref-guide/src/metrics-reporting.adoc b/solr/solr-ref-guide/src/metrics-reporting.adoc
index c66017c..b319bb2 100644
--- a/solr/solr-ref-guide/src/metrics-reporting.adoc
+++ b/solr/solr-ref-guide/src/metrics-reporting.adoc
@@ -670,4 +670,4 @@ http://localhost:8983/solr/admin/metrics?regex=.*\.requests&group=core
 Request only "user.name" property of "system.properties" metric from registry "solr.jvm":
 
 [source,text]
-http://localhost:8983/solr/admin/metrics?wt=xml?key=solr.jvm:system.properties:user.name
+http://localhost:8983/solr/admin/metrics?wt=xml&key=solr.jvm:system.properties:user.name


[lucene-solr] 14/47: SOLR-13289: Use the final collector's scoreMode (#1517)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit dfa3a96f18af5cecbdb182dc31f677d0d1c51f93
Author: Tomas Fernandez Lobbe <tf...@apache.org>
AuthorDate: Thu May 21 15:48:37 2020 -0700

    SOLR-13289: Use the final collector's scoreMode (#1517)
    
    This is needed in case a PostFilter changes the scoreMode
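
A minimal sketch of the interaction (modeled on the MockPostFilter added in the test below; not part of the patch itself): a PostFilter whose delegating collector asks for full scoring now disables the hit-count approximation.

    // inside a PostFilter implementation
    @Override
    public DelegatingCollector getFilterCollector(IndexSearcher searcher) {
      return new DelegatingCollector() {
        @Override
        public ScoreMode scoreMode() {
          return ScoreMode.COMPLETE; // this post filter needs real scores for every hit
        }
      };
    }
    // Because buildAndRunCollectorChain now returns the final collector, SolrIndexSearcher sees
    // ScoreMode.COMPLETE (or COMPLETE_NO_SCORES) and reports hitsRelation = EQUAL_TO, i.e. an exact
    // numFound, instead of the approximate relation produced under minExactHits.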
---
 solr/CHANGES.txt                                   |   2 +-
 .../org/apache/solr/search/SolrIndexSearcher.java  |  14 +-
 .../apache/solr/search/SolrIndexSearcherTest.java  | 268 +++++++++++++++------
 .../solr/search/TestCollapseQParserPlugin.java     |  29 +++
 4 files changed, 239 insertions(+), 74 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 4185e9b..1e0fa23 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -136,7 +136,7 @@ Optimizations
 * SOLR-13289: When the "minExactHits" parameter is provided in queries and its value is lower than the number of hits,
   Solr can speed up the query resolution by using the Block-Max WAND algorithm (see LUCENE-8135). When doing this, the
   count of matching documents in the response (numFound) will be an approximation.
-  (Ishan Chattopadhyaya, Munendra S N, Tomás Fernández Löbbe)
+  (Ishan Chattopadhyaya, Munendra S N, Tomás Fernández Löbbe, David Smiley)
 
 * SOLR-14472: Autoscaling "cores" preference now retrieves the core count more efficiently, and counts all cores.
   (David Smiley)
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index bf85d6c..af968d6 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -160,13 +160,14 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
         UninvertingReader.wrap(reader, core.getLatestSchema().getUninversionMapper()),
         SolrQueryTimeoutImpl.getInstance());
   }
-
+  
   /**
    * Builds the necessary collector chain (via delegate wrapping) and executes the query against it. This method takes
    * into consideration both the explicitly provided collector and postFilter as well as any needed collector wrappers
    * for dealing with options specified in the QueryCommand.
+   * @return The collector used for search
    */
-  private void buildAndRunCollectorChain(QueryResult qr, Query query, Collector collector, QueryCommand cmd,
+  private Collector buildAndRunCollectorChain(QueryResult qr, Query query, Collector collector, QueryCommand cmd,
       DelegatingCollector postFilter) throws IOException {
 
     EarlyTerminatingSortingCollector earlyTerminatingSortingCollector = null;
@@ -216,6 +217,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
     if (collector instanceof DelegatingCollector) {
       ((DelegatingCollector) collector).finish();
     }
+    return collector;
   }
 
   public SolrIndexSearcher(SolrCore core, String path, IndexSchema schema, SolrIndexConfig config, String name,
@@ -1580,11 +1582,15 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
         maxScoreCollector = new MaxScoreCollector();
         collector = MultiCollector.wrap(topCollector, maxScoreCollector);
       }
-      buildAndRunCollectorChain(qr, query, collector, cmd, pf.postFilter);
+      ScoreMode scoreModeUsed = buildAndRunCollectorChain(qr, query, collector, cmd, pf.postFilter).scoreMode();
 
       totalHits = topCollector.getTotalHits();
       TopDocs topDocs = topCollector.topDocs(0, len);
-      hitsRelation = topDocs.totalHits.relation;
+      if (scoreModeUsed == ScoreMode.COMPLETE || scoreModeUsed == ScoreMode.COMPLETE_NO_SCORES) {
+        hitsRelation = TotalHits.Relation.EQUAL_TO;
+      } else {
+        hitsRelation = topDocs.totalHits.relation;
+      }
       if (cmd.getSort() != null && query instanceof RankQuery == false && (cmd.getFlags() & GET_SCORES) != 0) {
         TopFieldCollector.populateScores(topDocs.scoreDocs, this, query);
       }
diff --git a/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java b/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java
index 309af8a..e670133 100644
--- a/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java
+++ b/solr/core/src/test/org/apache/solr/search/SolrIndexSearcherTest.java
@@ -16,31 +16,51 @@
  */
 package org.apache.solr.search;
 
+import java.io.IOException;
+
 import org.apache.lucene.index.Term;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.search.Weight;
 import org.apache.solr.SolrTestCaseJ4;
 import org.junit.Before;
 import org.junit.BeforeClass;
 
-import java.io.IOException;
-
 public class SolrIndexSearcherTest extends SolrTestCaseJ4 {
-  
+
   private final static int NUM_DOCS = 20;
 
   @BeforeClass
   public static void setUpClass() throws Exception {
     initCore("solrconfig.xml", "schema.xml");
     for (int i = 0 ; i < NUM_DOCS ; i ++) {
-      assertU(adoc("id", String.valueOf(i), "field1_s", "foo", "field2_s", String.valueOf(i % 2), "field3_s", String.valueOf(i)));
-      assertU(commit());
+      assertU(adoc("id", String.valueOf(i),
+          "field1_s", "foo",
+          "field2_s", String.valueOf(i % 2),
+          "field3_i_dvo", String.valueOf(i),
+          "field4_t", numbersTo(i)));
+      assertU(commit()); //commit inside the loop to get multiple segments
     }
   }
   
+  private static String numbersTo(int i) {
+    StringBuilder numbers = new StringBuilder();
+    for (int j = 0; j <= i ; j++) {
+      numbers.append(String.valueOf(j) + " ");
+    }
+    return numbers.toString();
+  }
+
   @Before
   public void setUp() throws Exception {
-    assertU(adoc("id", "1", "field1_s", "foo", "field2_s", "1", "field3_s", "1"));
+    assertU(adoc("id", "1",
+        "field1_s", "foo",
+        "field2_s", "1",
+        "field3_i_dvo", "1",
+        "field4_t", numbersTo(1)));
     assertU(commit());
     super.setUp();
   }
@@ -72,129 +92,239 @@ public class SolrIndexSearcherTest extends SolrTestCaseJ4 {
         );
   }
   
-  private void assertMatchesEqual(int expectedCount, QueryResult qr) {
+  private void assertMatchesEqual(int expectedCount, SolrIndexSearcher searcher, QueryCommand cmd) throws IOException {
+    QueryResult qr = new QueryResult();
+    searcher.search(qr, cmd);
     assertEquals(expectedCount, qr.getDocList().matches());
     assertEquals(TotalHits.Relation.EQUAL_TO, qr.getDocList().hitCountRelation());
   }
   
-  private void assertMatchesGraterThan(int expectedCount, QueryResult qr) {
+  private QueryResult assertMatchesGreaterThan(int expectedCount, SolrIndexSearcher searcher, QueryCommand cmd) throws IOException {
+    QueryResult qr = new QueryResult();
+    searcher.search(qr, cmd);
     assertTrue("Expecting returned matches to be greater than " + expectedCount + " but got " + qr.getDocList().matches(),
         expectedCount >= qr.getDocList().matches());
     assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, qr.getDocList().hitCountRelation());
+    return qr;
   }
   
   public void testLowMinExactHitsGeneratesApproximation() throws IOException {
     h.getCore().withSearcher(searcher -> {
-      QueryCommand cmd = new QueryCommand();
-      cmd.setMinExactHits(NUM_DOCS / 2);
-      cmd.setQuery(new TermQuery(new Term("field1_s", "foo")));
-      QueryResult qr = new QueryResult();
-      searcher.search(qr, cmd);
-      assertMatchesGraterThan(NUM_DOCS, qr);
+      QueryCommand cmd = createBasicQueryCommand(NUM_DOCS / 2, 10, "field1_s", "foo");
+      assertMatchesGreaterThan(NUM_DOCS, searcher, cmd);
       return null;
     });
     
     h.getCore().withSearcher(searcher -> {
-      QueryCommand cmd = new QueryCommand();
-      cmd.setMinExactHits(1);
-      cmd.setLen(1);
-      // We need to disable cache, otherwise the search will be done for 20 docs (cache window size) which brings up the minExactHits
-      cmd.setFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE);
-      cmd.setQuery(new TermQuery(new Term("field2_s", "1")));
-      QueryResult qr = new QueryResult();
-      searcher.search(qr, cmd);
-      assertMatchesGraterThan(NUM_DOCS/2, qr);
+      QueryCommand cmd = createBasicQueryCommand(1, 1, "field2_s", "1");
+      assertMatchesGreaterThan(NUM_DOCS/2, searcher, cmd);
       return null;
     });
   }
-  
+
   public void testHighMinExactHitsGeneratesExactCount() throws IOException {
     h.getCore().withSearcher(searcher -> {
-      QueryCommand cmd = new QueryCommand();
-      cmd.setMinExactHits(NUM_DOCS);
-      cmd.setQuery(new TermQuery(new Term("field1_s", "foo")));
-      QueryResult qr = new QueryResult();
-      searcher.search(qr, cmd);
-      assertMatchesEqual(NUM_DOCS, qr);
+      QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 10, "field1_s", "foo");
+      assertMatchesEqual(NUM_DOCS, searcher, cmd);
       return null;
     });
     
     h.getCore().withSearcher(searcher -> {
-      QueryCommand cmd = new QueryCommand();
-      cmd.setMinExactHits(NUM_DOCS);
-      cmd.setQuery(new TermQuery(new Term("field2_s", "1")));
-      QueryResult qr = new QueryResult();
-      searcher.search(qr, cmd);
-      assertMatchesEqual(NUM_DOCS/2, qr);
+      QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 10, "field2_s", "1");
+      assertMatchesEqual(NUM_DOCS/2, searcher, cmd);
       return null;
     });
   }
+
+  
   
   public void testLowMinExactHitsWithQueryResultCache() throws IOException {
     h.getCore().withSearcher(searcher -> {
-      QueryCommand cmd = new QueryCommand();
-      cmd.setMinExactHits(NUM_DOCS / 2);
-      cmd.setQuery(new TermQuery(new Term("field1_s", "foo")));
+      QueryCommand cmd = createBasicQueryCommand(NUM_DOCS / 2, 10, "field1_s", "foo");
+      cmd.clearFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE);
       searcher.search(new QueryResult(), cmd);
-      QueryResult qr = new QueryResult();
-      searcher.search(qr, cmd);
-      assertMatchesGraterThan(NUM_DOCS, qr);
+      assertMatchesGreaterThan(NUM_DOCS, searcher, cmd);
       return null;
     });
   }
   
   public void testHighMinExactHitsWithQueryResultCache() throws IOException {
     h.getCore().withSearcher(searcher -> {
-      QueryCommand cmd = new QueryCommand();
-      cmd.setMinExactHits(NUM_DOCS);
-      cmd.setQuery(new TermQuery(new Term("field1_s", "foo")));
+      QueryCommand cmd = createBasicQueryCommand(NUM_DOCS, 2, "field1_s", "foo");
+      cmd.clearFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE);
       searcher.search(new QueryResult(), cmd);
-      QueryResult qr = new QueryResult();
-      searcher.search(qr, cmd);
-      assertMatchesEqual(NUM_DOCS, qr);
+      assertMatchesEqual(NUM_DOCS, searcher, cmd);
       return null;
     });
   }
   
   public void testMinExactHitsMoreRows() throws IOException {
     h.getCore().withSearcher(searcher -> {
-      QueryCommand cmd = new QueryCommand();
-      cmd.setMinExactHits(2);
-      cmd.setLen(NUM_DOCS);
-      cmd.setQuery(new TermQuery(new Term("field1_s", "foo")));
-      QueryResult qr = new QueryResult();
-      searcher.search(qr, cmd);
-      assertMatchesEqual(NUM_DOCS, qr);
+      QueryCommand cmd = createBasicQueryCommand(2, NUM_DOCS, "field1_s", "foo");
+      assertMatchesEqual(NUM_DOCS, searcher, cmd);
       return null;
     });
   }
   
   public void testMinExactHitsMatchWithDocSet() throws IOException {
     h.getCore().withSearcher(searcher -> {
-      QueryCommand cmd = new QueryCommand();
+      QueryCommand cmd = createBasicQueryCommand(2, 2, "field1_s", "foo");
+      assertMatchesGreaterThan(NUM_DOCS, searcher, cmd);
+      
       cmd.setNeedDocSet(true);
-      cmd.setMinExactHits(2);
-      cmd.setQuery(new TermQuery(new Term("field1_s", "foo")));
-      searcher.search(new QueryResult(), cmd);
-      QueryResult qr = new QueryResult();
-      searcher.search(qr, cmd);
-      assertMatchesEqual(NUM_DOCS, qr);
+      assertMatchesEqual(NUM_DOCS, searcher, cmd);
       return null;
     });
   }
   
   public void testMinExactHitsWithMaxScoreRequested() throws IOException {
     h.getCore().withSearcher(searcher -> {
-      QueryCommand cmd = new QueryCommand();
-      cmd.setMinExactHits(2);
+      QueryCommand cmd = createBasicQueryCommand(2, 2, "field1_s", "foo");
       cmd.setFlags(SolrIndexSearcher.GET_SCORES);
-      cmd.setQuery(new TermQuery(new Term("field1_s", "foo")));
-      searcher.search(new QueryResult(), cmd);
-      QueryResult qr = new QueryResult();
-      searcher.search(qr, cmd);
-      assertMatchesGraterThan(NUM_DOCS, qr);
+      QueryResult qr = assertMatchesGreaterThan(NUM_DOCS, searcher, cmd);
       assertNotEquals(Float.NaN, qr.getDocList().maxScore());
       return null;
     });
   }
+  
+  public void testMinExactWithFilters() throws Exception {
+    
+    h.getCore().withSearcher(searcher -> {
+      //Sanity Check - No Filter
+      QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0");
+      assertMatchesGreaterThan(NUM_DOCS, searcher, cmd);
+      return null;
+    });
+    
+    
+    h.getCore().withSearcher(searcher -> {
+      QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0");
+      Query filterQuery = new TermQuery(new Term("field4_t", "19"));
+      cmd.setFilterList(filterQuery);
+      assertNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter);
+      assertMatchesEqual(1, searcher, cmd);
+      return null;
+    });
+  }
+  
+  public void testMinExactWithPostFilters() throws Exception {
+    h.getCore().withSearcher(searcher -> {
+      //Sanity Check - No Filter
+      QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0");
+      assertMatchesGreaterThan(NUM_DOCS, searcher, cmd);
+      return null;
+    });
+    
+    
+    h.getCore().withSearcher(searcher -> {
+      QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0");
+      MockPostFilter filterQuery = new MockPostFilter(1, 101);
+      cmd.setFilterList(filterQuery);
+      assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter);
+      assertMatchesEqual(1, searcher, cmd);
+      return null;
+    });
+    
+    h.getCore().withSearcher(searcher -> {
+      QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0");
+      MockPostFilter filterQuery = new MockPostFilter(100, 101);
+      cmd.setFilterList(filterQuery);
+      assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter);
+      assertMatchesGreaterThan(NUM_DOCS, searcher, cmd);
+      return null;
+    });
+    
+  }
+  
+  public void testMinExactWithPostFilterThatChangesScoreMode() throws Exception {
+    h.getCore().withSearcher(searcher -> {
+      QueryCommand cmd = createBasicQueryCommand(1, 1, "field4_t", "0");
+      // Use ScoreMode.COMPLETE for the PostFilter
+      MockPostFilter filterQuery = new MockPostFilter(100, 101, ScoreMode.COMPLETE);
+      cmd.setFilterList(filterQuery);
+      assertNotNull(searcher.getProcessedFilter(null, cmd.getFilterList()).postFilter);
+      assertMatchesEqual(NUM_DOCS, searcher, cmd);
+      return null;
+    });
+  }
+
+  private QueryCommand createBasicQueryCommand(int minExactHits, int length, String field, String q) {
+    QueryCommand cmd = new QueryCommand();
+    cmd.setMinExactHits(minExactHits);
+    cmd.setLen(length);
+    cmd.setFlags(SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE);
+    cmd.setQuery(new TermQuery(new Term(field, q)));
+    return cmd;
+  }
+  
+  private final static class MockPostFilter  extends TermQuery implements PostFilter {
+    
+    private final int cost;
+    private final int maxDocsToCollect;
+    private final ScoreMode scoreMode;
+    
+    public MockPostFilter(int maxDocsToCollect, int cost, ScoreMode scoreMode) {
+      super(new Term("foo", "bar"));//The term won't really be used. just the collector
+      assert cost > 100;
+      this.cost = cost;
+      this.maxDocsToCollect = maxDocsToCollect;
+      this.scoreMode = scoreMode;
+    }
+
+    public MockPostFilter(int maxDocsToCollect, int cost) {
+      this(maxDocsToCollect, cost, null);
+    }
+    
+    @Override
+    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
+      throw new UnsupportedOperationException("This class is only intended to be used as a PostFilter");
+    }
+
+    @Override
+    public boolean getCache() {
+      return false;
+    }
+
+    @Override
+    public void setCache(boolean cache) {}
+
+    @Override
+    public int getCost() {
+      return cost;
+    }
+
+    @Override
+    public void setCost(int cost) {}
+
+    @Override
+    public boolean getCacheSep() {
+      return false;
+    }
+
+    @Override
+    public void setCacheSep(boolean cacheSep) {
+    }
+
+    @Override
+    public DelegatingCollector getFilterCollector(IndexSearcher searcher) {
+      return new DelegatingCollector() {
+        private int collected = 0;
+        @Override
+        public void collect(int doc) throws IOException {
+          if (++collected <= maxDocsToCollect) {
+            super.collect(doc);
+          }
+        }
+        
+        @Override
+        public ScoreMode scoreMode() {
+          if (scoreMode != null) {
+            return scoreMode;
+          }
+          return super.scoreMode();
+        }
+      };
+    }
+    
+  }
 }
diff --git a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
index 09444e1..2c6f9c4 100644
--- a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
@@ -1039,4 +1039,33 @@ public class TestCollapseQParserPlugin extends SolrTestCaseJ4 {
     assertQEx("Should Fail For collapsing on Date fields", "Collapsing field should be of either String, Int or Float type",
         req("q", "*:*", "fq", "{!collapse field=group_dt}"), SolrException.ErrorCode.BAD_REQUEST);
   }
+  
+  @Test
+  public void testMinExactHitsDisabledByCollapse() throws Exception {
+    int numDocs = 10;
+    String collapseFieldInt = "field_ti_dv";
+    String collapseFieldFloat = "field_tf_dv";
+    String collapseFieldString = "field_s_dv";
+    for (int i = 0 ; i < numDocs ; i ++) {
+      assertU(adoc(
+          "id", String.valueOf(i),
+          "field_s", String.valueOf(i % 2),
+          collapseFieldInt, String.valueOf(i),
+          collapseFieldFloat, String.valueOf(i),
+          collapseFieldString, String.valueOf(i)));
+        assertU(commit());
+    }
+    
+    for (String collapseField : new String[] {collapseFieldInt, collapseFieldFloat, collapseFieldString}) {
+      assertQ(req(
+          "q", "{!cache=false}field_s:1",
+          "rows", "1",
+          "minExactHits", "1",
+          // this collapse will end up matching all docs
+          "fq", "{!collapse field=" + collapseField + " nullPolicy=expand}"// nullPolicy needed due to a bug when val=0
+          ),"//*[@numFoundExact='true']"
+          ,"//*[@numFound='" + (numDocs/2) + "']"
+          );
+    }
+  }
 }
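
(For contrast, an illustrative sketch that is not part of this commit: without the collapse fq, the same low minExactHits value lets the searcher stop counting early, so the response may legitimately report numFoundExact="false" with numFound as only a lower bound. Parameter and attribute names are taken from the test above.)

    // Sketch only: same query, no collapse filter; hit counting may be approximate here.
    assertQ(req(
        "q", "{!cache=false}field_s:1",
        "rows", "1",
        "minExactHits", "1"
        ), "//result[@name='response']" // response is present; numFoundExact may be "false"
        );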


[lucene-solr] 11/47: SOLR-14482: Fix or suppress warnings in solr/search/facet

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 63d9cff945bd510ae8fe35e6666cf08596fa38b8
Author: Erick Erickson <Er...@gmail.com>
AuthorDate: Thu May 21 08:59:32 2020 -0400

    SOLR-14482: Fix or suppress warnings in solr/search/facet
---
 solr/CHANGES.txt                                   |    3 +-
 .../apache/solr/cloud/ExclusiveSliceProperty.java  |    2 +-
 .../apache/solr/metrics/SolrMetricProducer.java    |    9 +-
 .../apache/solr/response/JSONResponseWriter.java   |    8 +-
 .../apache/solr/response/PythonResponseWriter.java |    2 +-
 .../apache/solr/response/RubyResponseWriter.java   |    2 +-
 .../java/org/apache/solr/search/CaffeineCache.java |    3 +-
 .../java/org/apache/solr/search/QueryContext.java  |    1 +
 .../src/java/org/apache/solr/search/SolrCache.java |    3 +-
 .../apache/solr/search/facet/AggValueSource.java   |    3 +-
 .../java/org/apache/solr/search/facet/AvgAgg.java  |   22 +-
 .../org/apache/solr/search/facet/CountAgg.java     |    6 +-
 .../org/apache/solr/search/facet/CountValsAgg.java |   18 +-
 .../org/apache/solr/search/facet/DocValuesAcc.java |  567 +++++------
 .../org/apache/solr/search/facet/FacetBucket.java  |   10 +-
 .../org/apache/solr/search/facet/FacetContext.java |   74 ++
 .../apache/solr/search/facet/FacetDebugInfo.java   |    2 +-
 .../org/apache/solr/search/facet/FacetField.java   |   45 +-
 .../apache/solr/search/facet/FacetFieldMerger.java |    7 +-
 .../solr/search/facet/FacetFieldProcessor.java     |   23 +-
 .../search/facet/FacetFieldProcessorByArray.java   |    4 +-
 .../search/facet/FacetFieldProcessorByArrayDV.java |    2 +-
 .../facet/FacetFieldProcessorByArrayUIF.java       |    2 +-
 .../FacetFieldProcessorByEnumTermsStream.java      |    3 +-
 .../search/facet/FacetFieldProcessorByHashDV.java  |    9 +-
 .../org/apache/solr/search/facet/FacetHeatmap.java |    8 +-
 .../org/apache/solr/search/facet/FacetModule.java  |  315 +++---
 .../org/apache/solr/search/facet/FacetParser.java  |  414 ++++++++
 .../apache/solr/search/facet/FacetProcessor.java   |   21 +-
 .../org/apache/solr/search/facet/FacetQuery.java   |    7 +-
 .../org/apache/solr/search/facet/FacetRange.java   | 1038 +-------------------
 .../apache/solr/search/facet/FacetRangeMerger.java |    4 +-
 .../apache/solr/search/facet/FacetRangeParser.java |   76 ++
 .../{FacetRange.java => FacetRangeProcessor.java}  |  278 +++---
 .../org/apache/solr/search/facet/FacetRequest.java |  493 +---------
 .../solr/search/facet/FacetRequestSorted.java      |   58 ++
 .../search/facet/FacetRequestSortedMerger.java     |   10 +-
 .../java/org/apache/solr/search/facet/HLLAgg.java  |   16 +-
 .../org/apache/solr/search/facet/LegacyFacet.java  |    1 +
 .../org/apache/solr/search/facet/MinMaxAgg.java    |   31 +-
 .../org/apache/solr/search/facet/MissingAgg.java   |    8 +-
 .../apache/solr/search/facet/PercentileAgg.java    |   18 +-
 .../apache/solr/search/facet/RelatednessAgg.java   |   16 +-
 .../solr/search/facet/SimpleAggValueSource.java    |    1 +
 .../java/org/apache/solr/search/facet/SlotAcc.java |  695 ++++++-------
 .../org/apache/solr/search/facet/StddevAgg.java    |   18 +-
 .../java/org/apache/solr/search/facet/SumAgg.java  |   18 +-
 .../org/apache/solr/search/facet/SumsqAgg.java     |   16 +-
 .../apache/solr/search/facet/UnInvertedField.java  |    4 +-
 .../solr/search/facet/UnInvertedFieldAcc.java      |  170 ++--
 .../org/apache/solr/search/facet/UniqueAgg.java    |   14 +-
 .../apache/solr/search/facet/UniqueBlockAgg.java   |    6 +-
 .../solr/search/facet/UniqueBlockFieldAgg.java     |    2 +-
 .../solr/search/facet/UniqueBlockQueryAgg.java     |    4 +-
 .../solr/search/facet/UniqueMultiDvSlotAcc.java    |    2 +-
 .../search/facet/UniqueMultivaluedSlotAcc.java     |    2 +-
 .../search/facet/UniqueSinglevaluedSlotAcc.java    |    2 +-
 .../apache/solr/search/facet/UniqueSlotAcc.java    |    4 +-
 .../org/apache/solr/search/facet/VarianceAgg.java  |   18 +-
 .../autoscaling/sim/TestSnapshotCloudManager.java  |    4 +-
 .../org/apache/solr/response/JSONWriterTest.java   |    4 +-
 .../org/apache/solr/search/facet/DebugAgg.java     |   10 +-
 .../solr/search/facet/TestJsonFacetRefinement.java |    2 +-
 63 files changed, 1888 insertions(+), 2750 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index e2dc073..26e2cb3 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -240,8 +240,7 @@ Other Changes
 * SOLR-14226: Fix or suppress 14 resource leak warnings in apache/solr/core (Andras Salaman via
   Erick Erickson)
 
-* SOLR-14485: Fix or suppress 11 resource leak warnings in apache/solr/cloud (Andras Salaman via Erick Erickson)
-
+* SOLR-14482: Fix or suppress warnings in solr/search/facet (Erick Erickson)
 ==================  8.5.1 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/core/src/java/org/apache/solr/cloud/ExclusiveSliceProperty.java b/solr/core/src/java/org/apache/solr/cloud/ExclusiveSliceProperty.java
index bd9de94..448f455 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ExclusiveSliceProperty.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ExclusiveSliceProperty.java
@@ -362,7 +362,7 @@ class ExclusiveSliceProperty {
       this.replica = replica;
     }
     public String toString() {
-      StringBuilder sb = new StringBuilder(System.lineSeparator()).append(System.lineSeparator()).append("******EOE20 starting toString of SliceReplica");
+      StringBuilder sb = new StringBuilder(System.lineSeparator()).append(System.lineSeparator());
       sb.append("    :").append(System.lineSeparator()).append("slice: ").append(slice.toString()).append(System.lineSeparator()).append("      replica: ").append(replica.toString()).append(System.lineSeparator());
       return sb.toString();
     }
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
index c321a11..a7f24fd 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
@@ -16,6 +16,8 @@
  */
 package org.apache.solr.metrics;
 
+import java.io.IOException;
+
 /**
  * Used by objects that expose metrics through {@link SolrMetricManager}.
  */
@@ -62,9 +64,14 @@ public interface SolrMetricProducer extends AutoCloseable {
    * Implementations should always call <code>SolrMetricProducer.super.close()</code> to ensure that
    * metrics with the same life-cycle as this component are properly unregistered. This prevents
    * obscure memory leaks.
+   *
+   * from: https://docs.oracle.com/javase/8/docs/api/java/lang/AutoCloseable.html
+   * While this interface method is declared to throw Exception, implementers are strongly encouraged
+   * to declare concrete implementations of the close method to throw more specific exceptions, or to
+   * throw no exception at all if the close operation cannot fail.
    */
   @Override
-  default void close() throws Exception {
+  default void close() throws IOException {
     SolrMetricsContext context = getSolrMetricsContext();
     if (context == null) {
       return;
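
A minimal sketch (not part of this patch) of what the narrowed signature buys implementers; the class name is hypothetical and java.io.IOException is assumed to be imported:

    // Hypothetical implementer: close() can now be declared with IOException
    // instead of Exception, matching the AutoCloseable guidance quoted above.
    abstract class IOBoundMetricProducer implements SolrMetricProducer {
      @Override
      public void close() throws IOException {
        SolrMetricProducer.super.close(); // unregister metrics tied to this life-cycle first
        // ... then release any IO resources this component owns
      }
    }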
diff --git a/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java b/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
index 596a05e..e89d15e 100644
--- a/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/JSONResponseWriter.java
@@ -78,7 +78,7 @@ public class JSONResponseWriter implements QueryResponseWriter {
     return new JSONWriter(writer, req, rsp);
   }
 
-}
+
 
 /**
  * Writes NamedLists directly as an array of NameTypeValue JSON objects...
@@ -248,9 +248,10 @@ class ArrayOfNameTypeValueJSONWriter extends JSONWriter {
   }
 }
 
-abstract class NaNFloatWriter extends JSONWriter {
-  
+abstract static class NaNFloatWriter extends JSONWriter {
+
   abstract protected String getNaN();
+
   abstract protected String getInf();
 
   public NaNFloatWriter(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) {
@@ -283,3 +284,4 @@ abstract class NaNFloatWriter extends JSONWriter {
     }
   }
 }
+}
diff --git a/solr/core/src/java/org/apache/solr/response/PythonResponseWriter.java b/solr/core/src/java/org/apache/solr/response/PythonResponseWriter.java
index 98109df..be53ddb 100644
--- a/solr/core/src/java/org/apache/solr/response/PythonResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/PythonResponseWriter.java
@@ -46,7 +46,7 @@ public class PythonResponseWriter implements QueryResponseWriter {
   }
 }
 
-class PythonWriter extends NaNFloatWriter {
+class PythonWriter extends JSONResponseWriter.NaNFloatWriter {
   @Override
   protected String getNaN() { return "float('NaN')"; }
   @Override
diff --git a/solr/core/src/java/org/apache/solr/response/RubyResponseWriter.java b/solr/core/src/java/org/apache/solr/response/RubyResponseWriter.java
index 6b73a7c..eb4c17d 100644
--- a/solr/core/src/java/org/apache/solr/response/RubyResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/RubyResponseWriter.java
@@ -46,7 +46,7 @@ public void write(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) th
   }
 }
 
-class RubyWriter extends NaNFloatWriter {
+class RubyWriter extends JSONResponseWriter.NaNFloatWriter {
 
   @Override
   protected String getNaN() { return "(0.0/0.0)"; }
diff --git a/solr/core/src/java/org/apache/solr/search/CaffeineCache.java b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java
index 82271ad..318e0e8 100644
--- a/solr/core/src/java/org/apache/solr/search/CaffeineCache.java
+++ b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.search;
 
+import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.time.Duration;
 import java.util.Collections;
@@ -228,7 +229,7 @@ public class CaffeineCache<K, V> extends SolrCacheBase implements SolrCache<K, V
   }
 
   @Override
-  public void close() throws Exception {
+  public void close() throws IOException {
     SolrCache.super.close();
     cache.invalidateAll();
     cache.cleanUp();
diff --git a/solr/core/src/java/org/apache/solr/search/QueryContext.java b/solr/core/src/java/org/apache/solr/search/QueryContext.java
index 487feb4..ebffc32 100644
--- a/solr/core/src/java/org/apache/solr/search/QueryContext.java
+++ b/solr/core/src/java/org/apache/solr/search/QueryContext.java
@@ -33,6 +33,7 @@ import org.slf4j.LoggerFactory;
  * instantiate it on demand (and the need to put "searcher" in the map)
  * @lucene.experimental
  */
+@SuppressWarnings("rawtypes")
 public class QueryContext extends IdentityHashMap implements Closeable {
   // private IdentityHashMap map;  // we are the map for now (for compat w/ ValueSource)
   private final SolrIndexSearcher searcher;
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCache.java b/solr/core/src/java/org/apache/solr/search/SolrCache.java
index c37cf9e..4c770b5 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrCache.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCache.java
@@ -18,6 +18,7 @@ package org.apache.solr.search;
 
 import org.apache.solr.core.SolrInfoBean;
 
+import java.io.IOException;
 import java.util.Map;
 import java.util.function.Function;
 
@@ -150,7 +151,7 @@ public interface SolrCache<K,V> extends SolrInfoBean {
 
 
   /** Frees any non-memory resources */
-  default void close() throws Exception {
+  default void close() throws IOException {
     SolrInfoBean.super.close();
   }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/AggValueSource.java b/solr/core/src/java/org/apache/solr/search/facet/AggValueSource.java
index c633dbf..da83b91 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/AggValueSource.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/AggValueSource.java
@@ -44,13 +44,14 @@ public abstract class AggValueSource extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     // FUTURE
     throw new UnsupportedOperationException("NOT IMPLEMENTED " + name + " " + this);
   }
 
   // TODO: make abstract
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     throw new UnsupportedOperationException("NOT IMPLEMENTED " + name + " " + this);
   }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java b/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java
index e1a09a6..7036c30 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java
@@ -37,7 +37,7 @@ public class AvgAgg extends SimpleAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     ValueSource vs = getArg();
 
     if (vs instanceof FieldNameValueSource) {
@@ -62,7 +62,7 @@ public class AvgAgg extends SimpleAggValueSource {
       }
       vs = sf.getType().getValueSource(sf, null);
     }
-    return new AvgSlotAcc(vs, fcontext, numSlots);
+    return new SlotAcc.AvgSlotAcc(vs, fcontext, numSlots);
   }
 
   @Override
@@ -70,12 +70,13 @@ public class AvgAgg extends SimpleAggValueSource {
     return new Merger();
   }
 
-  private static class Merger extends FacetDoubleMerger {
+  private static class Merger extends FacetModule.FacetDoubleMerger {
     long num;
     double sum;
 
     @Override
     public void merge(Object facetResult, Context mcontext1) {
+      @SuppressWarnings({"unchecked"})
       List<Number> numberList = (List<Number>) facetResult;
       num += numberList.get(0).longValue();
       sum += numberList.get(1).doubleValue();
@@ -88,10 +89,10 @@ public class AvgAgg extends SimpleAggValueSource {
     }
   }
 
-  class AvgSortedNumericAcc extends DoubleSortedNumericDVAcc {
+  class AvgSortedNumericAcc extends DocValuesAcc.DoubleSortedNumericDVAcc {
     int[] counts;
 
-    public AvgSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public AvgSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots, 0);
       this.counts = new int[numSlots];
     }
@@ -114,6 +115,7 @@ public class AvgAgg extends SimpleAggValueSource {
     }
 
     @Override
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public Object getValue(int slot) {
       if (fcontext.isShard()) {
         ArrayList lst = new ArrayList(2);
@@ -138,10 +140,10 @@ public class AvgAgg extends SimpleAggValueSource {
     }
   }
 
-  class AvgSortedSetAcc extends DoubleSortedSetDVAcc {
+  class AvgSortedSetAcc extends DocValuesAcc.DoubleSortedSetDVAcc {
     int[] counts;
 
-    public AvgSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public AvgSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots, 0);
       this.counts = new int[numSlots];
     }
@@ -168,6 +170,7 @@ public class AvgAgg extends SimpleAggValueSource {
     }
 
     @Override
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public Object getValue(int slot) {
       if (fcontext.isShard()) {
         ArrayList lst = new ArrayList(2);
@@ -192,10 +195,10 @@ public class AvgAgg extends SimpleAggValueSource {
     }
   }
 
-  class AvgUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc {
+  class AvgUnInvertedFieldAcc extends UnInvertedFieldAcc.DoubleUnInvertedFieldAcc {
     int[] counts;
 
-    public AvgUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public AvgUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots, 0);
       this.counts = new int[numSlots];
     }
@@ -224,6 +227,7 @@ public class AvgAgg extends SimpleAggValueSource {
     }
 
     @Override
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public Object getValue(int slot) {
       if (fcontext.isShard()) {
         ArrayList lst = new ArrayList(2);
diff --git a/solr/core/src/java/org/apache/solr/search/facet/CountAgg.java b/solr/core/src/java/org/apache/solr/search/facet/CountAgg.java
index 527399c..e2f4e91 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/CountAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/CountAgg.java
@@ -24,12 +24,12 @@ public class CountAgg extends SimpleAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
-    return new CountSlotArrAcc(fcontext, numSlots);
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+    return new SlotAcc.CountSlotArrAcc(fcontext, numSlots);
   }
 
   @Override
   public FacetMerger createFacetMerger(Object prototype) {
-    return new FacetLongMerger();
+    return new FacetModule.FacetLongMerger();
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java b/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java
index 4923cc8..6415ff6 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java
@@ -37,7 +37,7 @@ public class CountValsAgg extends SimpleAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     ValueSource vs = getArg();
     if (vs instanceof FieldNameValueSource) {
       String field = ((FieldNameValueSource)vs).getFieldName();
@@ -64,12 +64,12 @@ public class CountValsAgg extends SimpleAggValueSource {
 
   @Override
   public FacetMerger createFacetMerger(Object prototype) {
-    return new FacetLongMerger();
+    return new FacetModule.FacetLongMerger();
   }
 
-  class CountValSlotAcc extends LongFuncSlotAcc {
+  class CountValSlotAcc extends SlotAcc.LongFuncSlotAcc {
 
-    public CountValSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
+    public CountValSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
       super(values, fcontext, numSlots, 0);
     }
 
@@ -81,9 +81,9 @@ public class CountValsAgg extends SimpleAggValueSource {
     }
   }
 
-  class CountSortedNumericDVAcc extends LongSortedNumericDVAcc {
+  class CountSortedNumericDVAcc extends DocValuesAcc.LongSortedNumericDVAcc {
 
-    public CountSortedNumericDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public CountSortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots, 0);
     }
 
@@ -93,9 +93,9 @@ public class CountValsAgg extends SimpleAggValueSource {
     }
   }
 
-  class CountSortedSetDVAcc extends LongSortedSetDVAcc {
+  class CountSortedSetDVAcc extends DocValuesAcc.LongSortedSetDVAcc {
 
-    public CountSortedSetDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public CountSortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots, 0);
     }
 
@@ -111,7 +111,7 @@ public class CountValsAgg extends SimpleAggValueSource {
     private int currentSlot;
     long[] result;
 
-    public CountMultiValuedAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public CountMultiValuedAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots);
       result = new long[numSlots];
     }
diff --git a/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java b/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java
index 38c9f08..547040e 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java
@@ -39,7 +39,7 @@ import org.apache.solr.schema.SchemaField;
 public abstract class DocValuesAcc extends SlotAcc {
   SchemaField sf;
 
-  public DocValuesAcc(FacetRequest.FacetContext fcontext, SchemaField sf) throws IOException {
+  public DocValuesAcc(FacetContext fcontext, SchemaField sf) throws IOException {
     super(fcontext);
     this.sf = sf;
   }
@@ -58,368 +58,371 @@ public abstract class DocValuesAcc extends SlotAcc {
    * returns whether or not given {@code doc} has value
    */
   protected abstract boolean advanceExact(int doc) throws IOException;
-}
 
-/**
- * Accumulator for {@link NumericDocValues}
- */
-abstract class NumericDVAcc extends DocValuesAcc {
-  NumericDocValues values;
 
-  public NumericDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf) throws IOException {
-    super(fcontext, sf);
-  }
+  /**
+   * Accumulator for {@link NumericDocValues}
+   */
+  abstract class NumericDVAcc extends DocValuesAcc {
+    NumericDocValues values;
 
-  @Override
-  public void setNextReader(LeafReaderContext readerContext) throws IOException {
-    super.setNextReader(readerContext);
-    values = DocValues.getNumeric(readerContext.reader(),  sf.getName());
-  }
+    public NumericDVAcc(FacetContext fcontext, SchemaField sf) throws IOException {
+      super(fcontext, sf);
+    }
 
-  @Override
-  protected boolean advanceExact(int doc) throws IOException {
-    return values.advanceExact(doc);
+    @Override
+    public void setNextReader(LeafReaderContext readerContext) throws IOException {
+      super.setNextReader(readerContext);
+      values = DocValues.getNumeric(readerContext.reader(), sf.getName());
+    }
+
+    @Override
+    protected boolean advanceExact(int doc) throws IOException {
+      return values.advanceExact(doc);
+    }
   }
-}
 
-/**
- * Accumulator for {@link SortedNumericDocValues}
- */
-abstract class SortedNumericDVAcc extends DocValuesAcc {
-  SortedNumericDocValues values;
+  /**
+   * Accumulator for {@link SortedNumericDocValues}
+   */
+  abstract static class SortedNumericDVAcc extends DocValuesAcc {
+    SortedNumericDocValues values;
 
-  public SortedNumericDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
-    super(fcontext, sf);
-  }
+    public SortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+      super(fcontext, sf);
+    }
 
-  @Override
-  public void setNextReader(LeafReaderContext readerContext) throws IOException {
-    super.setNextReader(readerContext);
-    values = DocValues.getSortedNumeric(readerContext.reader(),  sf.getName());
-  }
+    @Override
+    public void setNextReader(LeafReaderContext readerContext) throws IOException {
+      super.setNextReader(readerContext);
+      values = DocValues.getSortedNumeric(readerContext.reader(), sf.getName());
+    }
 
-  @Override
-  protected boolean advanceExact(int doc) throws IOException {
-    return values.advanceExact(doc);
+    @Override
+    protected boolean advanceExact(int doc) throws IOException {
+      return values.advanceExact(doc);
+    }
   }
-}
 
-abstract class LongSortedNumericDVAcc extends SortedNumericDVAcc {
-  long[] result;
-  long initialValue;
+  abstract static class LongSortedNumericDVAcc extends SortedNumericDVAcc {
+    long[] result;
+    long initialValue;
 
-  public LongSortedNumericDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
-    super(fcontext, sf, numSlots);
-    this.result = new long[numSlots];
-    this.initialValue = initialValue;
-    if (initialValue != 0) {
-      Arrays.fill(result, initialValue);
+    public LongSortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
+      super(fcontext, sf, numSlots);
+      this.result = new long[numSlots];
+      this.initialValue = initialValue;
+      if (initialValue != 0) {
+        Arrays.fill(result, initialValue);
+      }
     }
-  }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Long.compare(result[slotA], result[slotB]);
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Long.compare(result[slotA], result[slotB]);
+    }
 
-  @Override
-  public Object getValue(int slotNum) throws IOException {
-    return result[slotNum];
-  }
+    @Override
+    public Object getValue(int slotNum) throws IOException {
+      return result[slotNum];
+    }
 
-  @Override
-  public void reset() throws IOException {
-    Arrays.fill(result, initialValue);
-  }
+    @Override
+    public void reset() throws IOException {
+      Arrays.fill(result, initialValue);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
+    @Override
+    public void resize(Resizer resizer) {
     this.result = resizer.resize(result, initialValue);
-  }
+    }
 
-}
+  }
 
-abstract class DoubleSortedNumericDVAcc extends SortedNumericDVAcc {
-  double[] result;
-  double initialValue;
+  abstract static class DoubleSortedNumericDVAcc extends SortedNumericDVAcc {
+    double[] result;
+    double initialValue;
 
-  public DoubleSortedNumericDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException {
-    super(fcontext, sf, numSlots);
-    this.result = new double[numSlots];
-    this.initialValue = initialValue;
-    if (initialValue != 0) {
-      Arrays.fill(result, initialValue);
+    public DoubleSortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException {
+      super(fcontext, sf, numSlots);
+      this.result = new double[numSlots];
+      this.initialValue = initialValue;
+      if (initialValue != 0) {
+        Arrays.fill(result, initialValue);
+      }
     }
-  }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Double.compare(result[slotA], result[slotB]);
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Double.compare(result[slotA], result[slotB]);
+    }
 
-  @Override
-  public Object getValue(int slotNum) throws IOException {
-    return result[slotNum];
-  }
+    @Override
+    public Object getValue(int slotNum) throws IOException {
+      return result[slotNum];
+    }
 
-  @Override
-  public void reset() throws IOException {
-    Arrays.fill(result, initialValue);
-  }
+    @Override
+    public void reset() throws IOException {
+      Arrays.fill(result, initialValue);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
+    @Override
+    public void resize(Resizer resizer) {
     this.result = resizer.resize(result, initialValue);
+    }
+
+    /**
+     * converts given long value to double based on field type
+     */
+    protected double getDouble(long val) {
+      switch (sf.getType().getNumberType()) {
+        case INTEGER:
+        case LONG:
+        case DATE:
+          return val;
+        case FLOAT:
+          return NumericUtils.sortableIntToFloat((int) val);
+        case DOUBLE:
+          return NumericUtils.sortableLongToDouble(val);
+        default:
+          // this would never happen
+          return 0.0d;
+      }
+    }
+
   }
 
   /**
-   * converts given long value to double based on field type
+   * Base class for standard deviation and variance computation for fields with {@link SortedNumericDocValues}
    */
-  protected double getDouble(long val) {
-    switch (sf.getType().getNumberType()) {
-      case INTEGER:
-      case LONG:
-      case DATE:
-        return val;
-      case FLOAT:
-        return NumericUtils.sortableIntToFloat((int) val);
-      case DOUBLE:
-        return NumericUtils.sortableLongToDouble(val);
-      default:
-        // this would never happen
-        return 0.0d;
+  abstract static class SDVSortedNumericAcc extends DoubleSortedNumericDVAcc {
+    int[] counts;
+    double[] sum;
+
+    public SDVSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+      super(fcontext, sf, numSlots, 0);
+      this.counts = new int[numSlots];
+      this.sum = new double[numSlots];
     }
-  }
-
-}
-
-/**
- * Base class for standard deviation and variance computation for fields with {@link SortedNumericDocValues}
- */
-abstract class SDVSortedNumericAcc extends DoubleSortedNumericDVAcc {
-  int[] counts;
-  double[] sum;
-
-  public SDVSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
-    super(fcontext, sf, numSlots, 0);
-    this.counts = new int[numSlots];
-    this.sum = new double[numSlots];
-  }
 
-  @Override
-  protected void collectValues(int doc, int slot) throws IOException {
-    for (int i = 0, count = values.docValueCount(); i < count; i++) {
-      double val = getDouble(values.nextValue());
-      result[slot]+= val * val;
-      sum[slot]+= val;
-      counts[slot]++;
+    @Override
+    protected void collectValues(int doc, int slot) throws IOException {
+      for (int i = 0, count = values.docValueCount(); i < count; i++) {
+        double val = getDouble(values.nextValue());
+        result[slot] += val * val;
+        sum[slot] += val;
+        counts[slot]++;
+      }
     }
-  }
 
-  protected abstract double computeVal(int slot);
+    protected abstract double computeVal(int slot);
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Double.compare(computeVal(slotA), computeVal(slotB));
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Double.compare(computeVal(slotA), computeVal(slotB));
+    }
 
-  @Override
-  public Object getValue(int slot) {
-    if (fcontext.isShard()) {
-      ArrayList lst = new ArrayList(3);
-      lst.add(counts[slot]);
-      lst.add(result[slot]);
-      lst.add(sum[slot]);
-      return lst;
-    } else {
-      return computeVal(slot);
+    @Override
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public Object getValue(int slot) {
+      if (fcontext.isShard()) {
+        ArrayList lst = new ArrayList(3);
+        lst.add(counts[slot]);
+        lst.add(result[slot]);
+        lst.add(sum[slot]);
+        return lst;
+      } else {
+        return computeVal(slot);
+      }
     }
-  }
 
-  @Override
-  public void reset() throws IOException {
-    super.reset();
-    Arrays.fill(counts, 0);
-    Arrays.fill(sum, 0);
-  }
+    @Override
+    public void reset() throws IOException {
+      super.reset();
+      Arrays.fill(counts, 0);
+      Arrays.fill(sum, 0);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
-    super.resize(resizer);
+    @Override
+    public void resize(Resizer resizer) {
+      super.resize(resizer);
     this.counts = resizer.resize(counts, 0);
     this.sum = resizer.resize(sum, 0);
+    }
   }
-}
 
-/**
- * Accumulator for {@link SortedDocValues}
- */
-abstract class SortedDVAcc extends DocValuesAcc {
-  SortedDocValues values;
+  /**
+   * Accumulator for {@link SortedDocValues}
+   */
+  abstract class SortedDVAcc extends DocValuesAcc {
+    SortedDocValues values;
 
-  public SortedDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf) throws IOException {
-    super(fcontext, sf);
-  }
+    public SortedDVAcc(FacetContext fcontext, SchemaField sf) throws IOException {
+      super(fcontext, sf);
+    }
 
-  @Override
-  public void setNextReader(LeafReaderContext readerContext) throws IOException {
-    super.setNextReader(readerContext);
-    values = DocValues.getSorted(readerContext.reader(), sf.getName());
-  }
+    @Override
+    public void setNextReader(LeafReaderContext readerContext) throws IOException {
+      super.setNextReader(readerContext);
+      values = DocValues.getSorted(readerContext.reader(), sf.getName());
+    }
 
-  @Override
-  protected boolean advanceExact(int doc) throws IOException {
-    return values.advanceExact(doc);
+    @Override
+    protected boolean advanceExact(int doc) throws IOException {
+      return values.advanceExact(doc);
+    }
   }
-}
 
-/**
- * Accumulator for {@link SortedSetDocValues}
- */
-abstract class SortedSetDVAcc extends DocValuesAcc {
-  SortedSetDocValues values;
+  /**
+   * Accumulator for {@link SortedSetDocValues}
+   */
+  abstract static class SortedSetDVAcc extends DocValuesAcc {
+    SortedSetDocValues values;
 
-  public SortedSetDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
-    super(fcontext, sf);
-  }
+    public SortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+      super(fcontext, sf);
+    }
 
-  @Override
-  public void setNextReader(LeafReaderContext readerContext) throws IOException {
-    super.setNextReader(readerContext);
-    values = DocValues.getSortedSet(readerContext.reader(), sf.getName());
-  }
+    @Override
+    public void setNextReader(LeafReaderContext readerContext) throws IOException {
+      super.setNextReader(readerContext);
+      values = DocValues.getSortedSet(readerContext.reader(), sf.getName());
+    }
 
-  @Override
-  protected boolean advanceExact(int doc) throws IOException {
-    return values.advanceExact(doc);
+    @Override
+    protected boolean advanceExact(int doc) throws IOException {
+      return values.advanceExact(doc);
+    }
   }
-}
 
-abstract class LongSortedSetDVAcc extends SortedSetDVAcc {
-  long[] result;
-  long initialValue;
+  abstract static class LongSortedSetDVAcc extends SortedSetDVAcc {
+    long[] result;
+    long initialValue;
 
-  public LongSortedSetDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
-    super(fcontext, sf, numSlots);
-    result = new long[numSlots];
-    this.initialValue = initialValue;
-    if (initialValue != 0) {
-      Arrays.fill(result, initialValue);
+    public LongSortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
+      super(fcontext, sf, numSlots);
+      result = new long[numSlots];
+      this.initialValue = initialValue;
+      if (initialValue != 0) {
+        Arrays.fill(result, initialValue);
+      }
     }
-  }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Long.compare(result[slotA], result[slotB]);
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Long.compare(result[slotA], result[slotB]);
+    }
 
-  @Override
-  public Object getValue(int slotNum) throws IOException {
-    return result[slotNum];
-  }
+    @Override
+    public Object getValue(int slotNum) throws IOException {
+      return result[slotNum];
+    }
 
-  @Override
-  public void reset() throws IOException {
-    Arrays.fill(result, initialValue);
-  }
+    @Override
+    public void reset() throws IOException {
+      Arrays.fill(result, initialValue);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
+    @Override
+    public void resize(Resizer resizer) {
     this.result = resizer.resize(result, initialValue);
+    }
   }
-}
 
-abstract class DoubleSortedSetDVAcc extends SortedSetDVAcc {
-  double[] result;
-  double initialValue;
+  abstract static class DoubleSortedSetDVAcc extends SortedSetDVAcc {
+    double[] result;
+    double initialValue;
 
-  public DoubleSortedSetDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
-    super(fcontext, sf, numSlots);
-    result = new double[numSlots];
-    this.initialValue = initialValue;
-    if (initialValue != 0) {
-      Arrays.fill(result, initialValue);
+    public DoubleSortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
+      super(fcontext, sf, numSlots);
+      result = new double[numSlots];
+      this.initialValue = initialValue;
+      if (initialValue != 0) {
+        Arrays.fill(result, initialValue);
+      }
     }
-  }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Double.compare(result[slotA], result[slotB]);
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Double.compare(result[slotA], result[slotB]);
+    }
 
-  @Override
-  public Object getValue(int slotNum) throws IOException {
-    return result[slotNum];
-  }
+    @Override
+    public Object getValue(int slotNum) throws IOException {
+      return result[slotNum];
+    }
 
-  @Override
-  public void reset() throws IOException {
-    Arrays.fill(result, initialValue);
-  }
+    @Override
+    public void reset() throws IOException {
+      Arrays.fill(result, initialValue);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
+    @Override
+    public void resize(Resizer resizer) {
     this.result = resizer.resize(result, initialValue);
+    }
   }
-}
 
-/**
- * Base class for standard deviation and variance computation for fields with {@link SortedSetDocValues}
- */
-abstract class SDVSortedSetAcc extends DoubleSortedSetDVAcc {
-  int[] counts;
-  double[] sum;
-
-  public SDVSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
-    super(fcontext, sf, numSlots, 0);
-    this.counts = new int[numSlots];
-    this.sum = new double[numSlots];
-  }
+  /**
+   * Base class for standard deviation and variance computation for fields with {@link SortedSetDocValues}
+   */
+  abstract static class SDVSortedSetAcc extends DoubleSortedSetDVAcc {
+    int[] counts;
+    double[] sum;
+
+    public SDVSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+      super(fcontext, sf, numSlots, 0);
+      this.counts = new int[numSlots];
+      this.sum = new double[numSlots];
+    }
 
-  @Override
-  protected void collectValues(int doc, int slot) throws IOException {
-    long ord;
-    while ((ord = values.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
-      BytesRef term = values.lookupOrd(ord);
-      Object obj = sf.getType().toObject(sf, term);
-      double val = obj instanceof Date ? ((Date)obj).getTime(): ((Number)obj).doubleValue();
-      result[slot] += val * val;
-      sum[slot] += val;
-      counts[slot]++;
+    @Override
+    protected void collectValues(int doc, int slot) throws IOException {
+      long ord;
+      while ((ord = values.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
+        BytesRef term = values.lookupOrd(ord);
+        Object obj = sf.getType().toObject(sf, term);
+        double val = obj instanceof Date ? ((Date) obj).getTime() : ((Number) obj).doubleValue();
+        result[slot] += val * val;
+        sum[slot] += val;
+        counts[slot]++;
+      }
     }
-  }
 
-  protected abstract double computeVal(int slot);
+    protected abstract double computeVal(int slot);
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Double.compare(computeVal(slotA), computeVal(slotB));
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Double.compare(computeVal(slotA), computeVal(slotB));
+    }
 
-  @Override
-  public Object getValue(int slot) {
-    if (fcontext.isShard()) {
-      ArrayList lst = new ArrayList(3);
-      lst.add(counts[slot]);
-      lst.add(result[slot]);
-      lst.add(sum[slot]);
-      return lst;
-    } else {
-      return computeVal(slot);
+    @Override
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public Object getValue(int slot) {
+      if (fcontext.isShard()) {
+        ArrayList lst = new ArrayList(3);
+        lst.add(counts[slot]);
+        lst.add(result[slot]);
+        lst.add(sum[slot]);
+        return lst;
+      } else {
+        return computeVal(slot);
+      }
     }
-  }
 
-  @Override
-  public void reset() throws IOException {
-    super.reset();
-    Arrays.fill(counts, 0);
-    Arrays.fill(sum, 0);
-  }
+    @Override
+    public void reset() throws IOException {
+      super.reset();
+      Arrays.fill(counts, 0);
+      Arrays.fill(sum, 0);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
-    super.resize(resizer);
+    @Override
+    public void resize(Resizer resizer) {
+      super.resize(resizer);
     this.counts = resizer.resize(counts, 0);
     this.sum = resizer.resize(sum, 0);
+    }
   }
 }
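
Background sketch (an illustration, not from this patch): the getDouble(long) helper above is needed because FLOAT and DOUBLE fields store their numeric docValues as sortable int/long bits, so the raw value must be mapped back before it can be summed or compared. Assuming org.apache.lucene.util.NumericUtils is imported:

    long rawD = NumericUtils.doubleToSortableLong(3.25);   // what a DOUBLE field stores
    double d  = NumericUtils.sortableLongToDouble(rawD);   // 3.25 again
    int rawF  = NumericUtils.floatToSortableInt(1.5f);     // what a FLOAT field stores
    float f   = NumericUtils.sortableIntToFloat(rawF);     // 1.5f again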
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetBucket.java b/solr/core/src/java/org/apache/solr/search/facet/FacetBucket.java
index ae1eba6..eadf60d 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetBucket.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetBucket.java
@@ -24,14 +24,17 @@ import java.util.Map;
 import org.apache.solr.common.util.SimpleOrderedMap;
 
 public class FacetBucket {
-  final FacetBucketMerger parent;
+  @SuppressWarnings("rawtypes")
+  final FacetModule.FacetBucketMerger parent;
+  @SuppressWarnings({"rawtypes"})
   final Comparable bucketValue;
   final int bucketNumber;  // this is just for internal correlation (the first bucket created is bucket 0, the next bucket 1, across all field buckets)
 
   long count;
   Map<String, FacetMerger> subs;
 
-  public FacetBucket(FacetBucketMerger parent, Comparable bucketValue, FacetMerger.Context mcontext) {
+  public FacetBucket(@SuppressWarnings("rawtypes") FacetModule.FacetBucketMerger parent
+      , @SuppressWarnings("rawtypes") Comparable bucketValue, FacetMerger.Context mcontext) {
     this.parent = parent;
     this.bucketValue = bucketValue;
     this.bucketNumber = mcontext.getNewBucketNumber(); // TODO: we don't need bucket numbers for all buckets...
@@ -66,7 +69,7 @@ public class FacetBucket {
     return merger;
   }
 
-  public void mergeBucket(SimpleOrderedMap bucket, FacetMerger.Context mcontext) {
+  public void mergeBucket(@SuppressWarnings("rawtypes") SimpleOrderedMap bucket, FacetMerger.Context mcontext) {
     // todo: for refinements, we want to recurse, but not re-do stats for intermediate buckets
 
     mcontext.setShardFlag(bucketNumber);
@@ -93,6 +96,7 @@ public class FacetBucket {
   }
 
 
+  @SuppressWarnings({"rawtypes", "unchecked"})
   public SimpleOrderedMap getMergedBucket() {
     SimpleOrderedMap out = new SimpleOrderedMap( (subs == null ? 0 : subs.size()) + 2 );
     if (bucketValue != null) {
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetContext.java b/solr/core/src/java/org/apache/solr/search/facet/FacetContext.java
new file mode 100644
index 0000000..86aa3ad
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetContext.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search.facet;
+
+import java.util.Map;
+
+import org.apache.lucene.search.Query;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.search.DocSet;
+import org.apache.solr.search.QueryContext;
+import org.apache.solr.search.SolrIndexSearcher;
+
+public class FacetContext {
+  // Context info for actually executing a local facet command
+  public static final int IS_SHARD=0x01;
+  public static final int IS_REFINEMENT=0x02;
+  public static final int SKIP_FACET=0x04;  // refinement: skip calculating this immediate facet, but proceed to specific sub-facets based on facetInfo
+
+  FacetProcessor<?> processor;
+  Map<String,Object> facetInfo; // refinement info for this node
+  QueryContext qcontext;
+  SolrQueryRequest req;  // TODO: replace with params?
+  SolrIndexSearcher searcher;
+  Query filter;  // TODO: keep track of as a DocSet or as a Query?
+  DocSet base;
+  FacetContext parent;
+  int flags;
+  FacetDebugInfo debugInfo;
+
+  public void setDebugInfo(FacetDebugInfo debugInfo) {
+    this.debugInfo = debugInfo;
+  }
+
+  public FacetDebugInfo getDebugInfo() {
+    return debugInfo;
+  }
+
+  public boolean isShard() {
+    return (flags & IS_SHARD) != 0;
+  }
+
+  /**
+   * @param filter The filter for the bucket that resulted in this context/domain.  Can be null if this is the root context.
+   * @param domain The resulting set of documents for this facet.
+   */
+  public FacetContext sub(Query filter, DocSet domain) {
+    FacetContext ctx = new FacetContext();
+    ctx.parent = this;
+    ctx.base = domain;
+    ctx.filter = filter;
+
+    // carry over from parent
+    ctx.flags = flags;
+    ctx.qcontext = qcontext;
+    ctx.req = req;
+    ctx.searcher = searcher;
+
+    return ctx;
+  }
+}
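
An illustrative sketch (not part of this commit) of how the new sub(...) method is typically used, assuming code in the same package (the fields are package-private), a hypothetical helper name, and SolrIndexSearcher.getDocSet(Query, DocSet) to intersect domains:

    // Hypothetical helper: derive a child context whose domain is the parent's
    // domain restricted to a single bucket's filter.
    static FacetContext childContextFor(FacetContext parent, Query bucketFilter) throws IOException {
      DocSet bucketDomain = parent.searcher.getDocSet(bucketFilter, parent.base);
      return parent.sub(bucketFilter, bucketDomain); // flags/qcontext/req/searcher carry over
    }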
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetDebugInfo.java b/solr/core/src/java/org/apache/solr/search/facet/FacetDebugInfo.java
index 2be2fef..d6a3650 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetDebugInfo.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetDebugInfo.java
@@ -65,7 +65,7 @@ public class FacetDebugInfo {
     return info;
   }
   
-  public SimpleOrderedMap getFacetDebugInfo() {
+  public SimpleOrderedMap<Object> getFacetDebugInfo() {
     SimpleOrderedMap<Object> info = new SimpleOrderedMap<>();
     
     if (filter != null) info.add("filter", filter);
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetField.java b/solr/core/src/java/org/apache/solr/search/facet/FacetField.java
index f2a3c2d..728cd6e 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetField.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetField.java
@@ -24,50 +24,6 @@ import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.NumberType;
 import org.apache.solr.schema.SchemaField;
 
-
-// Any type of facet request that generates a variable number of buckets
-// and the ability to sort by those generated buckets.
-abstract class FacetRequestSorted extends FacetRequest {
-  long offset;
-  long limit;
-  /** 
-   * Number of buckets to request beyond the limit to do internally during initial distributed search. 
-   * -1 means default heuristic.
-   */
-  int overrequest = -1;
-  /** 
-   * Number of buckets to fill in beyond the limit to do internally during refinement of distributed search. 
-   * -1 means default heuristic.
-   */
-  int overrefine = -1;
-  long mincount;
-  /** 
-   * The basic sorting to do on buckets, defaults to {@link FacetRequest.FacetSort#COUNT_DESC} 
-   * @see #prelim_sort
-   */
-  FacetSort sort;
-  /** 
-   * An optional "Pre-Sort" that defaults to null.
-   * If specified, then the <code>prelim_sort</code> is used as an optimization in place of {@link #sort} 
-   * during collection, and the full {@link #sort} values are only computed for the top candidate buckets 
-   * (after refinement)
-   */
-  FacetSort prelim_sort;
-  RefineMethod refine; // null, NONE, or SIMPLE
-
-  @Override
-  public RefineMethod getRefineMethod() {
-    return refine;
-  }
-
-  @Override
-  public boolean returnsPartial() {
-    return super.returnsPartial() || (limit > 0);
-  }
-
-}
-
-
 public class FacetField extends FacetRequestSorted {
   public static final int DEFAULT_FACET_LIMIT = 10;
   String field;
@@ -114,6 +70,7 @@ public class FacetField extends FacetRequestSorted {
   }
 
   @Override
+  @SuppressWarnings("rawtypes")
   public FacetProcessor createFacetProcessor(FacetContext fcontext) {
     SchemaField sf = fcontext.searcher.getSchema().getField(field);
     FieldType ft = sf.getType();
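
(Aside, an illustration rather than part of the patch: the knobs documented on FacetRequestSorted, which this commit moves into its own file, map directly onto JSON Facet API parameters. Field and stat names below are hypothetical.)

    // Illustrative JSON Facet request body exercising the FacetRequestSorted knobs:
    //
    //   "categories" : {
    //     "type"        : "terms",
    //     "field"       : "cat_s",
    //     "offset"      : 0,
    //     "limit"       : 10,
    //     "mincount"    : 1,
    //     "overrequest" : -1,             // -1 = default heuristic
    //     "refine"      : true,           // RefineMethod.SIMPLE
    //     "prelim_sort" : "count desc",   // cheap sort used while collecting
    //     "sort"        : "avg_price desc",
    //     "facet"       : { "avg_price" : "avg(price)" }
    //   }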
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
index f6276b5..a1c39cf 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldMerger.java
@@ -45,6 +45,7 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
   }
 
   @Override
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void merge(Object facetResult, Context mcontext) {
     super.merge(facetResult, mcontext);
     if (numReturnedPerShard == null) {
@@ -53,7 +54,7 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
     merge((SimpleOrderedMap)facetResult, mcontext);
   }
 
-  protected void merge(SimpleOrderedMap facetResult, Context mcontext) {
+  protected void merge(@SuppressWarnings("rawtypes") SimpleOrderedMap facetResult, Context mcontext) {
     if (freq.missing) {
       Object o = facetResult.get("missing");
       if (o != null) {
@@ -74,6 +75,8 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
       }
     }
 
+
+    @SuppressWarnings({"unchecked", "rawtypes"})
     List<SimpleOrderedMap> bucketList = (List<SimpleOrderedMap>) facetResult.get("buckets");
     numReturnedPerShard[mcontext.shardNum] = bucketList.size();
     numReturnedBuckets += bucketList.size();
@@ -95,6 +98,7 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
 
 
   @Override
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public Object getMergedResult() {
     SimpleOrderedMap result = new SimpleOrderedMap();
 
@@ -199,6 +203,7 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
     Set<Object> values;
 
     @Override
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public void merge(Object facetResult, Context mcontext) {
       SimpleOrderedMap map = (SimpleOrderedMap)facetResult;
       long numBuckets = ((Number)map.get("numBuckets")).longValue();
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
index 631fefc..7a261b5 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
@@ -41,7 +41,7 @@ import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.DocSet;
 import org.apache.solr.search.facet.SlotAcc.SlotContext;
 
-import static org.apache.solr.search.facet.FacetRequest.FacetContext.SKIP_FACET;
+import static org.apache.solr.search.facet.FacetContext.SKIP_FACET;
 
 /**
  * Facet processing based on field values. (not range nor by query)
@@ -69,7 +69,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
 
   SpecialSlotAcc allBucketsAcc;  // this can internally refer to otherAccs and/or collectAcc. setNextReader should be called on otherAccs directly if they exist.
 
-  FacetFieldProcessor(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
+  FacetFieldProcessor(FacetContext fcontext, FacetField freq, SchemaField sf) {
     super(fcontext, freq);
     this.sf = sf;
     this.effectiveMincount = (int)(fcontext.isShard() ? Math.min(1 , freq.mincount) : freq.mincount);
@@ -115,7 +115,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
 
     // allow a custom count acc to be used
     if (countAcc == null) {
-      countAcc = new CountSlotArrAcc(fcontext, slotCount);
+      countAcc = new SlotAcc.CountSlotArrAcc(fcontext, slotCount);
       countAcc.key = "count";
     }
 
@@ -162,7 +162,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
       if (indexOrderAcc == null) {
         // This sorting accumulator just goes by the slot number, so does not need to be collected
         // and hence does not need to find its way into the accMap or accs array.
-        indexOrderAcc = new SortSlotAcc(fcontext);
+        indexOrderAcc = new SlotAcc.SortSlotAcc(fcontext);
       }
       return indexOrderAcc;
     }
@@ -178,7 +178,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
     // we always count...
     // allow a subclass to set a custom counter.
     if (countAcc == null) {
-      countAcc = new CountSlotArrAcc(fcontext, numSlots);
+      countAcc = new SlotAcc.CountSlotArrAcc(fcontext, numSlots);
     }
 
     sortAcc = getTrivialSortingSlotAcc(this.sort);
@@ -292,8 +292,8 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
 
   /** Processes the collected data to find the top slots, and composes them into the response NamedList. */
   SimpleOrderedMap<Object> findTopSlots(final int numSlots, final int slotCardinality,
-                                        IntFunction<Comparable> bucketValFromSlotNumFunc,
-                                        Function<Comparable, String> fieldQueryValFunc) throws IOException {
+                                        @SuppressWarnings("rawtypes") IntFunction<Comparable> bucketValFromSlotNumFunc,
+                                        @SuppressWarnings("rawtypes") Function<Comparable, String> fieldQueryValFunc) throws IOException {
     assert this.sortAcc != null;
     long numBuckets = 0;
 
@@ -437,6 +437,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
         sortedSlots = Arrays.copyOfRange(sortedSlots, off, endOffset);
       }
     }
+    @SuppressWarnings({"rawtypes"})
     List<SimpleOrderedMap> bucketList = new ArrayList<>(sortedSlots.length);
 
     for (Slot slot : sortedSlots) {
@@ -492,6 +493,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
     int slot;
 
     /** filled in only once we know the bucket will either be involved in resorting, or returned */
+    @SuppressWarnings({"rawtypes"})
     Comparable bucketVal;
 
     /** Filled in if and only if needed for resorting, deferred stats, or subfacets */
@@ -690,7 +692,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
   static class MultiAcc extends SlotAcc {
     final SlotAcc[] subAccs;
 
-    MultiAcc(FacetRequest.FacetContext fcontext, SlotAcc[] subAccs) {
+    MultiAcc(FacetContext fcontext, SlotAcc[] subAccs) {
       super(fcontext);
       this.subAccs = subAccs;
     }
@@ -749,7 +751,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
     int otherAccsSlot;
     long count;
 
-    SpecialSlotAcc(FacetRequest.FacetContext fcontext, SlotAcc collectAcc, int collectAccSlot, SlotAcc[] otherAccs, int otherAccsSlot) {
+    SpecialSlotAcc(FacetContext fcontext, SlotAcc collectAcc, int collectAccSlot, SlotAcc[] otherAccs, int otherAccsSlot) {
       super(fcontext);
       this.collectAcc = collectAcc;
       this.collectAccSlot = collectAccSlot;
@@ -839,10 +841,12 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
    "cat1":{"_l":["A"]}}}
    */
 
+  @SuppressWarnings({"unchecked"})
   static <T> List<T> asList(Object list) {
     return list != null ? (List<T>)list : Collections.EMPTY_LIST;
   }
 
+  @SuppressWarnings({"rawtypes", "unchecked"})
   protected SimpleOrderedMap<Object> refineFacets() throws IOException {
     boolean skipThisFacet = (fcontext.flags & SKIP_FACET) != 0;
 
@@ -874,6 +878,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
     }
 
     // The only difference between skip and missing is the value of "skip" passed to refineBucket
+
     for (List bucketAndFacetInfo : partial) {
       assert bucketAndFacetInfo.size() == 2;
       Object bucketVal = bucketAndFacetInfo.get(0);
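Most hunks in FacetFieldProcessor.java narrow @SuppressWarnings to a single method, parameter, or local-variable declaration instead of the enclosing class. A minimal, self-contained sketch of the local-declaration form (class and method names here are illustrative, not taken from this patch):

    import java.util.ArrayList;
    import java.util.List;

    public class SuppressScopeSketch {
      // The annotation on the declaration suppresses the unchecked-cast warning
      // for this one statement only, not for the rest of the method.
      static List<String> copyNames(Object raw) {
        @SuppressWarnings("unchecked")
        List<String> names = (List<String>) raw;
        return new ArrayList<>(names);
      }

      public static void main(String[] args) {
        Object raw = List.of("a", "b");
        System.out.println(copyNames(raw));   // [a, b]
      }
    }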
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
index a018a87..318dbc7 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
@@ -28,7 +28,7 @@ import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.facet.SlotAcc.SlotContext;
 
-import static org.apache.solr.search.facet.FacetRequest.FacetContext.SKIP_FACET;
+import static org.apache.solr.search.facet.FacetContext.SKIP_FACET;
 
 /**
  * Base class for DV/UIF accumulating counts into an array by ordinal.  It's
@@ -45,7 +45,7 @@ abstract class FacetFieldProcessorByArray extends FacetFieldProcessor {
 
   int allBucketsSlot = -1;  // slot for the primary Accs (countAcc, collectAcc)
 
-  FacetFieldProcessorByArray(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
+  FacetFieldProcessorByArray(FacetContext fcontext, FacetField freq, SchemaField sf) {
     super(fcontext, freq, sf);
   }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java
index 1443b5e..dfd1bc1 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayDV.java
@@ -46,7 +46,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
   SortedSetDocValues si;  // only used for term lookups (for both single and multi-valued)
   OrdinalMap ordinalMap = null; // maps per-segment ords to global ords
 
-  FacetFieldProcessorByArrayDV(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
+  FacetFieldProcessorByArrayDV(FacetContext fcontext, FacetField freq, SchemaField sf) {
     super(fcontext, freq, sf);
     multiValuedField = sf.multiValued() || sf.getType().multiValuedFieldCache();
   }
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayUIF.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayUIF.java
index 4e70d40..6c90b3e 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayUIF.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArrayUIF.java
@@ -30,7 +30,7 @@ class FacetFieldProcessorByArrayUIF extends FacetFieldProcessorByArray {
   UnInvertedField uif;
   TermsEnum te;
 
-  FacetFieldProcessorByArrayUIF(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
+  FacetFieldProcessorByArrayUIF(FacetContext fcontext, FacetField freq, SchemaField sf) {
     super(fcontext, freq, sf);
     if (! sf.isUninvertible()) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java
index c2c0a1f..746915b 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByEnumTermsStream.java
@@ -72,7 +72,7 @@ class FacetFieldProcessorByEnumTermsStream extends FacetFieldProcessor implement
   
   LeafReaderContext[] leaves;
 
-  FacetFieldProcessorByEnumTermsStream(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
+  FacetFieldProcessorByEnumTermsStream(FacetContext fcontext, FacetField freq, SchemaField sf) {
     super(fcontext, freq, sf);
   }
 
@@ -85,6 +85,7 @@ class FacetFieldProcessorByEnumTermsStream extends FacetFieldProcessor implement
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public void process() throws IOException {
     super.process();
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java
index 058cfd6..e39055b 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java
@@ -162,6 +162,7 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor {
 
     /** To be returned in "buckets"/"val" */
     @Override
+    @SuppressWarnings({"rawtypes"})
     public Comparable bitsToValue(long globalOrd) {
       BytesRef bytesRef = lookupOrdFunction.apply((int) globalOrd);
       // note FacetFieldProcessorByArray.findTopSlots also calls SchemaFieldType.toObject
@@ -169,16 +170,19 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor {
     }
 
     @Override
+    @SuppressWarnings({"rawtypes"})
     public String formatValue(Comparable val) {
       return (String) val;
     }
 
     @Override
+    @SuppressWarnings({"rawtypes"})
     protected Comparable parseStr(String rawval) throws ParseException {
       throw new UnsupportedOperationException();
     }
 
     @Override
+    @SuppressWarnings({"rawtypes"})
     protected Comparable parseAndAddGap(Comparable value, String gap) throws ParseException {
       throw new UnsupportedOperationException();
     }
@@ -189,7 +193,7 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor {
   LongCounts table;
   int allBucketsSlot = -1;
 
-  FacetFieldProcessorByHashDV(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
+  FacetFieldProcessorByHashDV(FacetContext fcontext, FacetField freq, SchemaField sf) {
     super(fcontext, freq, sf);
     if (freq.mincount == 0) {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
@@ -285,7 +289,7 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor {
       }
     };
 
-    countAcc = new CountSlotAcc(fcontext) {
+    countAcc = new SlotAcc.CountSlotAcc(fcontext) {
       @Override
       public void incrementCount(int slot, long count) {
         throw new UnsupportedOperationException();
@@ -437,6 +441,7 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor {
    */
   private IntFunction<SlotContext> slotContext = (slotNum) -> {
     long val = table.vals[slotNum];
+    @SuppressWarnings({"rawtypes"})
     Comparable value = calc.bitsToValue(val);
     return new SlotContext(sf.getType().getFieldQuery(null, sf, calc.formatValue(value)));
   };
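The slotContext member just above is an IntFunction<SlotContext>: the per-slot value, and the field query built from it, is only computed when an accumulator asks for a concrete slot. A stand-alone illustration of the same deferred-lookup idea, with hypothetical names and a plain String result:

    import java.util.function.IntFunction;

    public class SlotLookupSketch {
      public static void main(String[] args) {
        long[] vals = {5L, 10L, 20L};               // stand-in for table.vals
        IntFunction<String> slotLabel =
            slot -> "slot " + slot + " -> " + vals[slot];
        // Nothing is looked up until apply() is called for a specific slot.
        System.out.println(slotLabel.apply(1));     // slot 1 -> 10
      }
    }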
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetHeatmap.java b/solr/core/src/java/org/apache/solr/search/facet/FacetHeatmap.java
index a87e9f2..4d17df5 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetHeatmap.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetHeatmap.java
@@ -94,6 +94,7 @@ public class FacetHeatmap extends FacetRequest {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   static class Parser extends FacetParser<FacetHeatmap> {
+    @SuppressWarnings({"rawtypes"})
     Parser(FacetParser parent, String key) {
       super(parent, key);
     }
@@ -117,6 +118,7 @@ public class FacetHeatmap extends FacetRequest {
       final DistanceUnits distanceUnits;
       // note: the two instanceof conditions are not ideal, versus one. If we start needing to add more, then refactor.
       if ((type instanceof AbstractSpatialPrefixTreeFieldType)) {
+        @SuppressWarnings({"rawtypes"})
         AbstractSpatialPrefixTreeFieldType rptType = (AbstractSpatialPrefixTreeFieldType) type;
         strategy = (PrefixTreeStrategy) rptType.getStrategy(fieldName);
         distanceUnits = rptType.getDistanceUnits();
@@ -204,17 +206,21 @@ public class FacetHeatmap extends FacetRequest {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FacetProcessor createFacetProcessor(FacetContext fcontext) {
     return new FacetHeatmapProcessor(fcontext);
   }
 
   // don't use an anonymous class since the getSimpleName() isn't friendly in debug output
+  @SuppressWarnings({"rawtypes"})
   private class FacetHeatmapProcessor extends FacetProcessor {
+    @SuppressWarnings({"unchecked"})
     public FacetHeatmapProcessor(FacetContext fcontext) {
       super(fcontext, FacetHeatmap.this);
     }
 
     @Override
+    @SuppressWarnings({"unchecked"})
     public void process() throws IOException {
       super.process(); // handles domain changes
 
@@ -233,7 +239,7 @@ public class FacetHeatmap extends FacetRequest {
       }
 
       //Populate response
-      response = new SimpleOrderedMap();
+      response = new SimpleOrderedMap<>();
       response.add("gridLevel", gridLevel);
       response.add("columns", heatmap.columns);
       response.add("rows", heatmap.rows);
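The switch from new SimpleOrderedMap() to new SimpleOrderedMap<>() above is the usual rawtypes cleanup: with the diamond, the compiler infers the type argument from the declaration, so no raw type is created and no warning is emitted. The same idea with a JDK collection (a sketch, not Solr code):

    import java.util.ArrayList;
    import java.util.List;

    public class DiamondSketch {
      public static void main(String[] args) {
        // List names = new ArrayList();           // raw type: rawtypes/unchecked warnings
        List<String> names = new ArrayList<>();    // diamond: type argument inferred, no warning
        names.add("gridLevel");
        System.out.println(names);                 // [gridLevel]
      }
    }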
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java b/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
index 6eb10d7..0bd6651 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetModule.java
@@ -51,8 +51,8 @@ public class FacetModule extends SearchComponent {
   // The largest current flag in ShardRequest is 0x00002000
   // We'll put our bits in the middle to avoid future ones in ShardRequest and
   // custom ones that may start at the top.
-  public final static int PURPOSE_GET_JSON_FACETS      = 0x00100000;
-  public final static int PURPOSE_REFINE_JSON_FACETS   = 0x00200000;
+  public final static int PURPOSE_GET_JSON_FACETS = 0x00100000;
+  public final static int PURPOSE_REFINE_JSON_FACETS = 0x00200000;
 
   // Internal information passed down from the top level to shards for distributed faceting.
   private final static String FACET_INFO = "_facet_";
@@ -67,11 +67,12 @@ public class FacetModule extends SearchComponent {
 
 
   @Override
+  @SuppressWarnings({"unchecked"})
   public void prepare(ResponseBuilder rb) throws IOException {
-    Map<String,Object> json = rb.req.getJSON();
-    Map<String,Object> jsonFacet = null;
+    Map<String, Object> json = rb.req.getJSON();
+    Map<String, Object> jsonFacet = null;
     if (json == null) {
-      int version = rb.req.getParams().getInt("facet.version",1);
+      int version = rb.req.getParams().getInt("facet.version", 1);
       if (version <= 1) return;
       boolean facetsEnabled = rb.req.getParams().getBool(FacetParams.FACET, false);
       if (!facetsEnabled) return;
@@ -90,14 +91,15 @@ public class FacetModule extends SearchComponent {
     SolrParams params = rb.req.getParams();
 
     boolean isShard = params.getBool(ShardParams.IS_SHARD, false);
-    Map<String,Object> facetInfo = null;
+    @SuppressWarnings({"unchecked"})
+    Map<String, Object> facetInfo = null;
     if (isShard) {
       String jfacet = params.get(FACET_INFO);
       if (jfacet == null) {
         // if this is a shard request, but there is no _facet_ info, then don't do anything.
         return;
       }
-      facetInfo = (Map<String,Object>) fromJSONString(jfacet);
+      facetInfo = (Map<String, Object>) fromJSONString(jfacet);
     }
 
     // At this point, we know we need to do something.  Create and save the state.
@@ -118,6 +120,7 @@ public class FacetModule extends SearchComponent {
 
 
   @Override
+  @SuppressWarnings({"unchecked"})
   public void process(ResponseBuilder rb) throws IOException {
     // if this is null, faceting is not enabled
     FacetComponentState facetState = getFacetComponentState(rb);
@@ -125,17 +128,17 @@ public class FacetModule extends SearchComponent {
 
     boolean isShard = rb.req.getParams().getBool(ShardParams.IS_SHARD, false);
 
-    FacetRequest.FacetContext fcontext = new FacetRequest.FacetContext();
+    FacetContext fcontext = new FacetContext();
     fcontext.base = rb.getResults().docSet;
     fcontext.req = rb.req;
     fcontext.searcher = rb.req.getSearcher();
     fcontext.qcontext = QueryContext.newContext(fcontext.searcher);
     if (isShard) {
-      fcontext.flags |= FacetRequest.FacetContext.IS_SHARD;
-      fcontext.facetInfo = facetState.facetInfo.isEmpty() ? null : (Map<String,Object>)facetState.facetInfo.get(FACET_REFINE);
+      fcontext.flags |= FacetContext.IS_SHARD;
+      fcontext.facetInfo = facetState.facetInfo.isEmpty() ? null : (Map<String, Object>) facetState.facetInfo.get(FACET_REFINE);
       if (fcontext.facetInfo != null) {
-        fcontext.flags |= FacetRequest.FacetContext.IS_REFINEMENT;
-        fcontext.flags |= FacetRequest.FacetContext.SKIP_FACET; // the root bucket should have been received from all shards previously
+        fcontext.flags |= FacetContext.IS_REFINEMENT;
+        fcontext.flags |= FacetContext.SKIP_FACET; // the root bucket should have been received from all shards previously
       }
     }
     if (rb.isDebug()) {
@@ -170,7 +173,7 @@ public class FacetModule extends SearchComponent {
     }
 
     // Check if there are any refinements possible
-    if ((facetState.mcontext==null) ||facetState.mcontext.getSubsWithRefinement(facetState.facetRequest).isEmpty()) {
+    if ((facetState.mcontext == null) || facetState.mcontext.getSubsWithRefinement(facetState.facetRequest).isEmpty()) {
       clearFaceting(rb.outgoing);
       return ResponseBuilder.STAGE_DONE;
     }
@@ -187,7 +190,7 @@ public class FacetModule extends SearchComponent {
       facetState.mcontext.setShard(shard);
 
       // shard-specific refinement
-      Map<String,Object> refinement = facetState.merger.getRefinement(facetState.mcontext);
+      Map<String, Object> refinement = facetState.merger.getRefinement(facetState.mcontext);
       if (refinement == null) continue;
 
       boolean newRequest = false;
@@ -197,11 +200,10 @@ public class FacetModule extends SearchComponent {
       // If nshards becomes too great, we may want to move to hashing for
       // better scalability.
       for (ShardRequest sreq : rb.outgoing) {
-        if ( (sreq.purpose & (ShardRequest.PURPOSE_GET_FIELDS|ShardRequest.PURPOSE_REFINE_FACETS|ShardRequest.PURPOSE_REFINE_PIVOT_FACETS)) != 0
+        if ((sreq.purpose & (ShardRequest.PURPOSE_GET_FIELDS | ShardRequest.PURPOSE_REFINE_FACETS | ShardRequest.PURPOSE_REFINE_PIVOT_FACETS)) != 0
             && sreq.shards != null
             && sreq.shards.length == 1
-            && sreq.shards[0].equals(shard))
-        {
+            && sreq.shards[0].equals(shard)) {
           shardsRefineRequest = sreq;
           break;
         }
@@ -212,7 +214,7 @@ public class FacetModule extends SearchComponent {
         // so create one ourselves.
         newRequest = true;
         shardsRefineRequest = new ShardRequest();
-        shardsRefineRequest.shards = new String[] { shard };
+        shardsRefineRequest.shards = new String[]{shard};
         shardsRefineRequest.params = new ModifiableSolrParams(rb.req.getParams());
         // don't request any documents
         shardsRefineRequest.params.remove(CommonParams.START);
@@ -222,7 +224,7 @@ public class FacetModule extends SearchComponent {
 
       shardsRefineRequest.purpose |= PURPOSE_REFINE_JSON_FACETS;
 
-      Map<String,Object> finfo = new HashMap<>(1);
+      Map<String, Object> finfo = new HashMap<>(1);
       finfo.put(FACET_REFINE, refinement);
 
       // String finfoStr = JSONUtil.toJSON(finfo, -1);  // this doesn't handle formatting of Date objects the way we want
@@ -232,7 +234,7 @@ public class FacetModule extends SearchComponent {
         public void handleUnknownClass(Object o) {
           // handle date formatting correctly
           if (o instanceof Date) {
-            String s = ((Date)o).toInstant().toString();
+            String s = ((Date) o).toInstant().toString();
             writeString(s);
             return;
           }
@@ -254,7 +256,7 @@ public class FacetModule extends SearchComponent {
   }
 
   @Override
-  public void modifyRequest(ResponseBuilder rb, SearchComponent who,ShardRequest sreq) {
+  public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest sreq) {
     FacetComponentState facetState = getFacetComponentState(rb);
     if (facetState == null) return;
 
@@ -264,8 +266,8 @@ public class FacetModule extends SearchComponent {
     } else {
       // turn off faceting on other requests
       /*** distributedProcess will need to use other requests for refinement
-      sreq.params.remove("json.facet");  // this just saves space... the presence of FACET_INFO really control the faceting
-      sreq.params.remove(FACET_INFO);
+       sreq.params.remove("json.facet");  // this just saves space... the presence of FACET_INFO really controls the faceting
+       sreq.params.remove(FACET_INFO);
        **/
     }
   }
@@ -281,15 +283,15 @@ public class FacetModule extends SearchComponent {
       if (top == null) continue; // shards.tolerant=true will cause this to happen on exceptions/errors
       Object facet = top.get("facets");
       if (facet == null) {
-        SimpleOrderedMap shardResponseHeader = (SimpleOrderedMap)rsp.getResponse().get("responseHeader");
-        if(Boolean.TRUE.equals(shardResponseHeader.getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) {
+        @SuppressWarnings("rawtypes") SimpleOrderedMap shardResponseHeader = (SimpleOrderedMap) rsp.getResponse().get("responseHeader");
+        if (Boolean.TRUE.equals(shardResponseHeader.getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) {
           rb.rsp.getResponseHeader().asShallowMap().put(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY, Boolean.TRUE);
         }
         continue;
       }
       if (facetState.merger == null) {
         facetState.merger = facetState.facetRequest.createFacetMerger(facet);
-        facetState.mcontext = new FacetMerger.Context( sreq.responses.size() );
+        facetState.mcontext = new FacetMerger.Context(sreq.responses.size());
       }
 
       if ((sreq.purpose & PURPOSE_REFINE_JSON_FACETS) != 0) {
@@ -297,14 +299,14 @@ public class FacetModule extends SearchComponent {
         // call merge again with a diff flag set on the context???
         facetState.mcontext.root = facet;
         facetState.mcontext.setShard(shardRsp.getShard());  // TODO: roll newShard into setShard?
-        facetState.merger.merge(facet , facetState.mcontext);
+        facetState.merger.merge(facet, facetState.mcontext);
         return;
       }
 
       // System.err.println("MERGING FACET RESULT FROM SHARD = " + facet);
       facetState.mcontext.root = facet;
       facetState.mcontext.newShard(shardRsp.getShard());
-      facetState.merger.merge(facet , facetState.mcontext);
+      facetState.merger.merge(facet, facetState.mcontext);
     }
   }
 
@@ -330,182 +332,181 @@ public class FacetModule extends SearchComponent {
   public Category getCategory() {
     return Category.QUERY;
   }
-}
 
 
-// TODO: perhaps factor out some sort of root/parent facet object that doesn't depend
+  // TODO: perhaps factor out some sort of root/parent facet object that doesn't depend
 // on stuff like ResponseBuilder, but contains request parameters,
 // root filter lists (for filter exclusions), etc?
-class FacetComponentState {
-  ResponseBuilder rb;
-  Map<String,Object> facetCommands;
-  FacetRequest facetRequest;
-  boolean isShard;
-  Map<String,Object> facetInfo; // _facet_ param: contains out-of-band facet info, mainly for refinement requests
-
-  //
-  // Only used for distributed search
-  //
-  FacetMerger merger;
-  FacetMerger.Context mcontext;
-}
-
-// base class for facet functions that can be used in a sort
-abstract class FacetSortableMerger extends FacetMerger {
-  public void prepareSort() {
+  class FacetComponentState {
+    ResponseBuilder rb;
+    Map<String, Object> facetCommands;
+    FacetRequest facetRequest;
+    boolean isShard;
+    Map<String, Object> facetInfo; // _facet_ param: contains out-of-band facet info, mainly for refinement requests
+
+    //
+    // Only used for distributed search
+    //
+    FacetMerger merger;
+    FacetMerger.Context mcontext;
   }
 
-  @Override
-  public void finish(Context mcontext) {
-    // nothing to do for simple stats...
-  }
-
-  /** Return the normal comparison sort order.  The sort direction is only to be used in special circumstances (such as making NaN sort
-   * last regardless of sort order.)  Normal sorters do not need to pay attention to direction.
-   */
-  public abstract int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction);
-}
-
-abstract class FacetDoubleMerger extends FacetSortableMerger {
-  @Override
-  public abstract void merge(Object facetResult, Context mcontext);
+  // base class for facet functions that can be used in a sort
+  abstract static class FacetSortableMerger extends FacetMerger {
+    public void prepareSort() {
+    }
 
-  protected abstract double getDouble();
+    @Override
+    public void finish(Context mcontext) {
+      // nothing to do for simple stats...
+    }
 
-  @Override
-  public Object getMergedResult() {
-    return getDouble();
+    /**
+     * Return the normal comparison sort order.  The sort direction is only to be used in special circumstances (such as making NaN sort
+     * last regardless of sort order.)  Normal sorters do not need to pay attention to direction.
+     */
+    public abstract int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction);
   }
 
+  abstract static class FacetDoubleMerger extends FacetSortableMerger {
+    @Override
+    public abstract void merge(Object facetResult, Context mcontext);
 
-  @Override
-  public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
-    return compare(getDouble(), ((FacetDoubleMerger)other).getDouble(), direction);
-  }
-
+    protected abstract double getDouble();
 
-  public static int compare(double a, double b, FacetRequest.SortDirection direction) {
-    if (a < b) return -1;
-    if (a > b) return 1;
-
-    if (a != a) {  // a==NaN
-      if (b != b) {
-        return 0;  // both NaN
-      }
-      return -1 * direction.getMultiplier();  // asc==-1, so this will put NaN at end of sort
-    }
-
-    if (b != b) { // b is NaN so a is greater
-      return 1 * direction.getMultiplier();  // if sorting asc, make a less so NaN is at end
+    @Override
+    public Object getMergedResult() {
+      return getDouble();
     }
 
-    // consider +-0 to be equal
-    return 0;
-  }
-}
-
 
+    @Override
+    public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
+      return compare(getDouble(), ((FacetDoubleMerger) other).getDouble(), direction);
+    }
 
 
+    public static int compare(double a, double b, FacetRequest.SortDirection direction) {
+      if (a < b) return -1;
+      if (a > b) return 1;
 
-class FacetLongMerger extends FacetSortableMerger {
-  long val;
+      if (a != a) {  // a==NaN
+        if (b != b) {
+          return 0;  // both NaN
+        }
+        return -1 * direction.getMultiplier();  // asc==-1, so this will put NaN at end of sort
+      }
 
-  @Override
-  public void merge(Object facetResult, Context mcontext) {
-    val += ((Number)facetResult).longValue();
-  }
+      if (b != b) { // b is NaN so a is greater
+        return 1 * direction.getMultiplier();  // if sorting asc, make a less so NaN is at end
+      }
 
-  @Override
-  public Object getMergedResult() {
-    return val;
+      // consider +-0 to be equal
+      return 0;
+    }
   }
 
-  @Override
-  public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
-    return Long.compare(val, ((FacetLongMerger)other).val);
-  }
-}
+  static class FacetLongMerger extends FacetSortableMerger {
+    long val;
 
+    @Override
+    public void merge(Object facetResult, Context mcontext) {
+      val += ((Number) facetResult).longValue();
+    }
 
-// base class for facets that create buckets (and can hence have sub-facets)
-abstract class FacetBucketMerger<FacetRequestT extends FacetRequest> extends FacetMerger {
-  FacetRequestT freq;
+    @Override
+    public Object getMergedResult() {
+      return val;
+    }
 
-  public FacetBucketMerger(FacetRequestT freq) {
-    this.freq = freq;
+    @Override
+    public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
+      return Long.compare(val, ((FacetLongMerger) other).val);
+    }
   }
 
-  /** Bucketval is the representative value for the bucket.  Only applicable to terms and range queries to distinguish buckets. */
-  FacetBucket newBucket(Comparable bucketVal, Context mcontext) {
-    return new FacetBucket(this, bucketVal, mcontext);
-  }
 
-  @Override
-  public Map<String, Object> getRefinement(Context mcontext) {
-    Collection<String> refineTags = mcontext.getSubsWithRefinement(freq);
-    return null; // FIXME
-  }
+  // base class for facets that create buckets (and can hence have sub-facets)
+  abstract static class FacetBucketMerger<FacetRequestT extends FacetRequest> extends FacetMerger {
+    FacetRequestT freq;
 
-  // do subs...
+    public FacetBucketMerger(FacetRequestT freq) {
+      this.freq = freq;
+    }
 
-  // callback stuff for buckets?
-  // passing object gives us a chance to specialize based on value
-  FacetMerger createFacetMerger(String key, Object val) {
-    FacetRequest sub = freq.getSubFacets().get(key);
-    if (sub != null) {
-      return sub.createFacetMerger(val);
+    /**
+     * Bucketval is the representative value for the bucket.  Only applicable to terms and range queries to distinguish buckets.
+     */
+    FacetBucket newBucket(@SuppressWarnings("rawtypes") Comparable bucketVal, Context mcontext) {
+      return new FacetBucket(this, bucketVal, mcontext);
     }
 
-    AggValueSource subStat = freq.getFacetStats().get(key);
-    if (subStat != null) {
-      return subStat.createFacetMerger(val);
+    @Override
+    public Map<String, Object> getRefinement(Context mcontext) {
+      Collection<String> refineTags = mcontext.getSubsWithRefinement(freq);
+      return null; // FIXME
     }
 
-    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "no merger for key=" + key + " , val=" + val);
-  }
-}
+    // do subs...
 
+    // callback stuff for buckets?
+    // passing object gives us a chance to specialize based on value
+    FacetMerger createFacetMerger(String key, Object val) {
+      FacetRequest sub = freq.getSubFacets().get(key);
+      if (sub != null) {
+        return sub.createFacetMerger(val);
+      }
 
-class FacetQueryMerger extends FacetBucketMerger<FacetQuery> {
-  FacetBucket bucket;
+      AggValueSource subStat = freq.getFacetStats().get(key);
+      if (subStat != null) {
+        return subStat.createFacetMerger(val);
+      }
 
-  public FacetQueryMerger(FacetQuery freq) {
-    super(freq);
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "no merger for key=" + key + " , val=" + val);
+    }
   }
 
-  @Override
-  public void merge(Object facet, Context mcontext) {
-    if (bucket == null) {
-      bucket = newBucket(null, mcontext);
+
+  static class FacetQueryMerger extends FacetBucketMerger<FacetQuery> {
+    FacetBucket bucket;
+
+    public FacetQueryMerger(FacetQuery freq) {
+      super(freq);
     }
-    bucket.mergeBucket((SimpleOrderedMap) facet, mcontext);
-  }
 
-  @Override
-  public Map<String, Object> getRefinement(Context mcontext) {
-    Collection<String> tags;
-    if (mcontext.bucketWasMissing()) {
-      // if this bucket was missing, we need to get all subfacets that have partials (that need to list values for refinement)
-      tags = mcontext.getSubsWithPartial(freq);
-    } else {
-      tags = mcontext.getSubsWithRefinement(freq);
+    @Override
+    public void merge(Object facet, Context mcontext) {
+      if (bucket == null) {
+        bucket = newBucket(null, mcontext);
+      }
+      bucket.mergeBucket((SimpleOrderedMap) facet, mcontext);
     }
 
-    Map<String,Object> refinement = bucket.getRefinement(mcontext, tags);
+    @Override
+    public Map<String, Object> getRefinement(Context mcontext) {
+      Collection<String> tags;
+      if (mcontext.bucketWasMissing()) {
+        // if this bucket was missing, we need to get all subfacets that have partials (that need to list values for refinement)
+        tags = mcontext.getSubsWithPartial(freq);
+      } else {
+        tags = mcontext.getSubsWithRefinement(freq);
+      }
 
-    return refinement;
-  }
+      Map<String, Object> refinement = bucket.getRefinement(mcontext, tags);
 
+      return refinement;
+    }
 
-  @Override
-  public void finish(Context mcontext) {
-    // FIXME we need to propagate!!!
-  }
 
-  @Override
-  public Object getMergedResult() {
-    return bucket.getMergedBucket();
+    @Override
+    public void finish(Context mcontext) {
+      // FIXME we need to propagate!!!
+    }
+
+    @Override
+    public Object getMergedResult() {
+      return bucket.getMergedBucket();
+    }
   }
 }
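FacetDoubleMerger.compare above relies on the sort-direction multiplier (asc == -1) to keep NaN at the end of the sort in either direction. A stand-alone paraphrase of that logic (a sketch only, not the Solr class):

    public class NaNSortSketch {
      // asc uses multiplier -1, desc uses +1, so NaN compares "after" real numbers either way.
      static int compare(double a, double b, int directionMultiplier) {
        if (a < b) return -1;
        if (a > b) return 1;
        if (a != a) {                                // a is NaN
          if (b != b) return 0;                      // both NaN
          return -1 * directionMultiplier;
        }
        if (b != b) return 1 * directionMultiplier;  // b is NaN, a is not
        return 0;                                    // +0.0 and -0.0 treated as equal
      }

      public static void main(String[] args) {
        System.out.println(compare(Double.NaN, 1.0, -1));  // 1   (asc multiplier is -1)
        System.out.println(compare(Double.NaN, 1.0, 1));   // -1  (desc multiplier is +1)
      }
    }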
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetParser.java b/solr/core/src/java/org/apache/solr/search/facet/FacetParser.java
new file mode 100644
index 0000000..308228b
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetParser.java
@@ -0,0 +1,414 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search.facet;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.search.FunctionQParser;
+import org.apache.solr.search.SyntaxError;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+abstract class FacetParser<FacetRequestT extends FacetRequest> {
+  protected FacetRequestT facet;
+  protected FacetParser<?> parent;
+  protected String key;
+
+  public FacetParser(FacetParser<?> parent, String key) {
+    this.parent = parent;
+    this.key = key;
+  }
+
+  public String getKey() {
+    return key;
+  }
+
+  public String getPathStr() {
+    if (parent == null) {
+      return "/" + key;
+    }
+    return parent.getKey() + "/" + key;
+  }
+
+  protected RuntimeException err(String msg) {
+    return new SolrException(SolrException.ErrorCode.BAD_REQUEST, msg + " , path="+getPathStr());
+  }
+
+  public abstract FacetRequest parse(Object o) throws SyntaxError;
+
+  // TODO: put the FacetRequest on the parser object?
+  public void parseSubs(Object o) throws SyntaxError {
+    if (o==null) return;
+    if (o instanceof Map) {
+      @SuppressWarnings({"unchecked"})
+      Map<String,Object> m = (Map<String, Object>) o;
+      for (Map.Entry<String,Object> entry : m.entrySet()) {
+        String key = entry.getKey();
+        Object value = entry.getValue();
+
+        if ("processEmpty".equals(key)) {
+          facet.processEmpty = getBoolean(m, "processEmpty", false);
+          continue;
+        }
+
+        // "my_prices" : { "range" : { "field":...
+        // key="my_prices", value={"range":..
+
+        Object parsedValue = parseFacetOrStat(key, value);
+
+        // TODO: have parseFacetOrStat directly add instead of return?
+        if (parsedValue instanceof FacetRequest) {
+          facet.addSubFacet(key, (FacetRequest)parsedValue);
+        } else if (parsedValue instanceof AggValueSource) {
+          facet.addStat(key, (AggValueSource)parsedValue);
+        } else {
+          throw err("Unknown facet type key=" + key + " class=" + (parsedValue == null ? "null" : parsedValue.getClass().getName()));
+        }
+      }
+    } else {
+      // facet : my_field?
+      throw err("Expected map for facet/stat");
+    }
+  }
+
+  public Object parseFacetOrStat(String key, Object o) throws SyntaxError {
+
+    if (o instanceof String) {
+      return parseStringFacetOrStat(key, (String)o);
+    }
+
+    if (!(o instanceof Map)) {
+      throw err("expected Map but got " + o);
+    }
+
+    // The type can be in a one element map, or inside the args as the "type" field
+    // { "query" : "foo:bar" }
+    // { "range" : { "field":... } }
+    // { "type"  : range, field : myfield, ... }
+    @SuppressWarnings({"unchecked"})
+    Map<String,Object> m = (Map<String,Object>)o;
+    String type;
+    Object args;
+
+    if (m.size() == 1) {
+      Map.Entry<String,Object> entry = m.entrySet().iterator().next();
+      type = entry.getKey();
+      args = entry.getValue();
+      // throw err("expected facet/stat type name, like {range:{... but got " + m);
+    } else {
+      // type should be inside the map as a parameter
+      Object typeObj = m.get("type");
+      if (!(typeObj instanceof String)) {
+        throw err("expected facet/stat type name, like {type:range, field:price, ...} but got " + typeObj);
+      }
+      type = (String)typeObj;
+      args = m;
+    }
+
+    return parseFacetOrStat(key, type, args);
+  }
+
+  public Object parseFacetOrStat(String key, String type, Object args) throws SyntaxError {
+    // TODO: a place to register all these facet types?
+
+    switch (type) {
+      case "field":
+      case "terms":
+        return new FacetRequest.FacetFieldParser(this, key).parse(args);
+      case "query":
+        return new FacetRequest.FacetQueryParser(this, key).parse(args);
+      case "range":
+        return new FacetRangeParser(this, key).parse(args);
+      case "heatmap":
+        return new FacetHeatmap.Parser(this, key).parse(args);
+      case "func":
+        return parseStat(key, args);
+    }
+
+    throw err("Unknown facet or stat. key=" + key + " type=" + type + " args=" + args);
+  }
+
+  public Object parseStringFacetOrStat(String key, String s) throws SyntaxError {
+    // "avg(myfield)"
+    return parseStat(key, s);
+    // TODO - simple string representation of facets
+  }
+
+  /** Parses simple strings like "avg(x)" in the context of optional local params (may be null) */
+  private AggValueSource parseStatWithParams(String key, SolrParams localparams, String stat) throws SyntaxError {
+    SolrQueryRequest req = getSolrRequest();
+    FunctionQParser parser = new FunctionQParser(stat, localparams, req.getParams(), req);
+    AggValueSource agg = parser.parseAgg(FunctionQParser.FLAG_DEFAULT);
+    return agg;
+  }
+
+  /** Parses simple strings like "avg(x)" or robust Maps that may contain local params */
+  private AggValueSource parseStat(String key, Object args) throws SyntaxError {
+    assert null != args;
+
+    if (args instanceof CharSequence) {
+      // Both of these variants are already unpacked for us in this case, and use no local params...
+      // 1) x:{func:'min(foo)'}
+      // 2) x:'min(foo)'
+      return parseStatWithParams(key, null, args.toString());
+    }
+
+    if (args instanceof Map) {
+      @SuppressWarnings({"unchecked"})
+      final Map<String,Object> statMap = (Map<String,Object>)args;
+      return parseStatWithParams(key, jsonToSolrParams(statMap), statMap.get("func").toString());
+    }
+
+    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+        "Stats must be specified as either a simple string, or a json Map");
+
+  }
+
+
+  private FacetRequest.Domain getDomain() {
+    if (facet.domain == null) {
+      facet.domain = new FacetRequest.Domain();
+    }
+    return facet.domain;
+  }
+
+  protected void parseCommonParams(Object o) {
+    if (o instanceof Map) {
+      @SuppressWarnings({"unchecked"})
+      Map<String,Object> m = (Map<String,Object>)o;
+      List<String> excludeTags = getStringList(m, "excludeTags");
+      if (excludeTags != null) {
+        getDomain().excludeTags = excludeTags;
+      }
+
+      Object domainObj =  m.get("domain");
+      if (domainObj instanceof Map) {
+        @SuppressWarnings({"unchecked"})
+        Map<String, Object> domainMap = (Map<String, Object>)domainObj;
+        FacetRequest.Domain domain = getDomain();
+
+        excludeTags = getStringList(domainMap, "excludeTags");
+        if (excludeTags != null) {
+          domain.excludeTags = excludeTags;
+        }
+
+        if (domainMap.containsKey("query")) {
+          domain.explicitQueries = parseJSONQueryStruct(domainMap.get("query"));
+          if (null == domain.explicitQueries) {
+            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+                "'query' domain can not be null or empty");
+          } else if (null != domain.excludeTags) {
+            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+                "'query' domain can not be combined with 'excludeTags'");
+          }
+        }
+
+        String blockParent = getString(domainMap, "blockParent", null);
+        String blockChildren = getString(domainMap, "blockChildren", null);
+
+        if (blockParent != null) {
+          domain.toParent = true;
+          domain.parents = blockParent;
+        } else if (blockChildren != null) {
+          domain.toChildren = true;
+          domain.parents = blockChildren;
+        }
+
+        FacetRequest.Domain.JoinField.createJoinField(domain, domainMap);
+        FacetRequest.Domain.GraphField.createGraphField(domain, domainMap);
+
+        Object filterOrList = domainMap.get("filter");
+        if (filterOrList != null) {
+          assert domain.filters == null;
+          domain.filters = parseJSONQueryStruct(filterOrList);
+        }
+
+      } else if (domainObj != null) {
+        throw err("Expected Map for 'domain', received " + domainObj.getClass().getSimpleName() + "=" + domainObj);
+      }
+    }
+  }
+
+  /** returns null on null input, otherwise returns a list of the JSON query structures -- either
+   * directly from the raw (list) input, or if raw input is a not a list then it encapsulates
+   * it in a new list.
+   */
+  @SuppressWarnings({"unchecked"})
+  private List<Object> parseJSONQueryStruct(Object raw) {
+    List<Object> result = null;
+    if (null == raw) {
+      return result;
+    } else if (raw instanceof List) {
+      result = (List<Object>) raw;
+    } else {
+      result = new ArrayList<>(1);
+      result.add(raw);
+    }
+    return result;
+  }
+
+  public String getField(Map<String,Object> args) {
+    Object fieldName = args.get("field"); // TODO: pull out into defined constant
+    if (fieldName == null) {
+      fieldName = args.get("f");  // short form
+    }
+    if (fieldName == null) {
+      throw err("Missing 'field'");
+    }
+
+    if (!(fieldName instanceof String)) {
+      throw err("Expected string for 'field', got " + fieldName);
+    }
+
+    return (String)fieldName;
+  }
+
+
+  public Long getLongOrNull(Map<String,Object> args, String paramName, boolean required) {
+    Object o = args.get(paramName);
+    if (o == null) {
+      if (required) {
+        throw err("Missing required parameter '" + paramName + "'");
+      }
+      return null;
+    }
+    if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) {
+      throw err("Expected integer type for param '"+paramName + "' but got " + o);
+    }
+
+    return ((Number)o).longValue();
+  }
+
+  public long getLong(Map<String,Object> args, String paramName, long defVal) {
+    Object o = args.get(paramName);
+    if (o == null) {
+      return defVal;
+    }
+    if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) {
+      throw err("Expected integer type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
+    }
+
+    return ((Number)o).longValue();
+  }
+
+  public Double getDoubleOrNull(Map<String,Object> args, String paramName, boolean required) {
+    Object o = args.get(paramName);
+    if (o == null) {
+      if (required) {
+        throw err("Missing required parameter '" + paramName + "'");
+      }
+      return null;
+    }
+    if (!(o instanceof Number)) {
+      throw err("Expected double type for param '" + paramName + "' but got " + o);
+    }
+
+    return ((Number)o).doubleValue();
+  }
+
+  public boolean getBoolean(Map<String,Object> args, String paramName, boolean defVal) {
+    Object o = args.get(paramName);
+    if (o == null) {
+      return defVal;
+    }
+    // TODO: should we be more flexible and accept things like "true" (strings)?
+    // Perhaps wait until the use case comes up.
+    if (!(o instanceof Boolean)) {
+      throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
+    }
+
+    return (Boolean)o;
+  }
+
+  public Boolean getBooleanOrNull(Map<String, Object> args, String paramName) {
+    Object o = args.get(paramName);
+
+    if (o != null && !(o instanceof Boolean)) {
+      throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
+    }
+    return (Boolean) o;
+  }
+
+
+  public String getString(Map<String,Object> args, String paramName, String defVal) {
+    Object o = args.get(paramName);
+    if (o == null) {
+      return defVal;
+    }
+    if (!(o instanceof String)) {
+      throw err("Expected string type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
+    }
+
+    return (String)o;
+  }
+
+  public Object getVal(Map<String, Object> args, String paramName, boolean required) {
+    Object o = args.get(paramName);
+    if (o == null && required) {
+      throw err("Missing required parameter: '" + paramName + "'");
+    }
+    return o;
+  }
+
+  public List<String> getStringList(Map<String,Object> args, String paramName) {
+    return getStringList(args, paramName, true);
+  }
+
+  @SuppressWarnings({"unchecked"})
+  public List<String> getStringList(Map<String, Object> args, String paramName, boolean decode) {
+    Object o = args.get(paramName);
+    if (o == null) {
+      return null;
+    }
+    if (o instanceof List) {
+      return (List<String>)o;
+    }
+    if (o instanceof String) {
+      // TODO: SOLR-12539 handle spaces in b/w comma & value ie, should the values be trimmed before returning??
+      return StrUtils.splitSmart((String)o, ",", decode);
+    }
+
+    throw err("Expected list of string or comma separated string values for '" + paramName +
+        "', received " + o.getClass().getSimpleName() + "=" + o);
+  }
+
+  public IndexSchema getSchema() {
+    return parent.getSchema();
+  }
+
+  public SolrQueryRequest getSolrRequest() {
+    return parent.getSolrRequest();
+  }
+
+  /**
+   * Helper that handles the possibility of map values being lists
+   * NOTE: does *NOT* fail on map values that are sub-maps (ie: nested json objects)
+   */
+  @SuppressWarnings({"unchecked", "rawtypes"})
+  public static SolrParams jsonToSolrParams(Map jsonObject) {
+    // HACK, but NamedList already handles the list processing for us...
+    NamedList<String> nl = new NamedList<>();
+    nl.addAll(jsonObject);
+    return SolrParams.toSolrParams(nl);
+  }
+}
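parseFacetOrStat above accepts the facet type in either of two JSON shapes: a one-entry map whose key is the type ({ "query" : "foo:bar" }), or a map carrying an explicit "type" entry ({ "type" : "range", "field" : "price", ... }). A minimal sketch of just that dispatch, with hypothetical names and no Solr dependencies:

    import java.util.Map;

    public class FacetTypeShapeSketch {
      // One-entry map: the key is the type. Otherwise the type must be an explicit "type" entry.
      static String facetType(Map<String, Object> m) {
        if (m.size() == 1) {
          return m.keySet().iterator().next();
        }
        Object t = m.get("type");
        if (!(t instanceof String)) {
          throw new IllegalArgumentException("expected facet/stat type name, got " + t);
        }
        return (String) t;
      }

      public static void main(String[] args) {
        Map<String, Object> byKey  = Map.of("query", "inStock:true");
        Map<String, Object> byType = Map.of("type", "range", "field", "price");
        System.out.println(facetType(byKey));    // query
        System.out.println(facetType(byType));   // range
      }
    }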
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
index c1043f9..c3d84eb 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetProcessor.java
@@ -47,21 +47,21 @@ import org.apache.solr.search.facet.SlotAcc.SlotContext;
 /** Base abstraction for a class that computes facets. This is fairly internal to the module. */
 public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
   SimpleOrderedMap<Object> response;
-  FacetRequest.FacetContext fcontext;
+  FacetContext fcontext;
   FacetRequestT freq;
 
   DocSet filter;  // additional filters specified by "filter"  // TODO: do these need to be on the context to support recomputing during multi-select?
   LinkedHashMap<String,SlotAcc> accMap;
   SlotAcc[] accs;
-  CountSlotAcc countAcc;
+  SlotAcc.CountSlotAcc countAcc;
 
-  FacetProcessor(FacetRequest.FacetContext fcontext, FacetRequestT freq) {
+  FacetProcessor(FacetContext fcontext, FacetRequestT freq) {
     this.fcontext = fcontext;
     this.freq = freq;
     fcontext.processor = this;
   }
 
-  public Object getResponse() {
+  public org.apache.solr.common.MapWriter getResponse() {
     return response;
   }
 
@@ -74,7 +74,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     this.filter = fcontext.searcher.getDocSet(evalJSONFilterQueryStruct(fcontext, freq.domain.filters));
   }
   
-  private static List<Query> evalJSONFilterQueryStruct(FacetRequest.FacetContext fcontext, List<Object> filters) throws IOException {
+  private static List<Query> evalJSONFilterQueryStruct(FacetContext fcontext, List<Object> filters) throws IOException {
     List<Query> qlist = new ArrayList<>(filters.size());
     // TODO: prevent parsing filters each time!
     for (Object rawFilter : filters) {
@@ -82,6 +82,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
         qlist.add(parserFilter((String) rawFilter, fcontext.req));
       } else if (rawFilter instanceof Map) {
 
+        @SuppressWarnings({"unchecked"})
         Map<String,Object> m = (Map<String, Object>) rawFilter;
         String type;
         Object args;
@@ -181,6 +182,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
       return;
     }
 
+    @SuppressWarnings({"rawtypes"})
     Map tagMap = (Map) fcontext.req.getContext().get("tags");
     if (tagMap == null) {
       // no filters were tagged
@@ -226,7 +228,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
 
     // now walk back up the context tree
     // TODO: we lose parent exclusions...
-    for (FacetRequest.FacetContext curr = fcontext; curr != null; curr = curr.parent) {
+    for (FacetContext curr = fcontext; curr != null; curr = curr.parent) {
       if (curr.filter != null) {
         qlist.add( curr.filter );
       }
@@ -307,7 +309,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
 
     // allow a custom count acc to be used
     if (countAcc == null) {
-      countAcc = new CountSlotArrAcc(fcontext, slotCount);
+      countAcc = new SlotAcc.CountSlotArrAcc(fcontext, slotCount);
       countAcc.key = "count";
     }
 
@@ -438,6 +440,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
     }
   }
 
+  @SuppressWarnings({"unchecked"})
   void processSubs(SimpleOrderedMap<Object> response, Query filter, DocSet domain, boolean skip, Map<String,Object> facetInfo) throws IOException {
 
     boolean emptyDomain = domain == null || domain.size() == 0;
@@ -462,9 +465,9 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest>  {
       if (skip && facetInfoSub == null) continue;
 
       // make a new context for each sub-facet since they can change the domain
-      FacetRequest.FacetContext subContext = fcontext.sub(filter, domain);
+      FacetContext subContext = fcontext.sub(filter, domain);
       subContext.facetInfo = facetInfoSub;
-      if (!skip) subContext.flags &= ~FacetRequest.FacetContext.SKIP_FACET;  // turn off the skip flag if we're not skipping this bucket
+      if (!skip) subContext.flags &= ~FacetContext.SKIP_FACET;  // turn off the skip flag if we're not skipping this bucket
 
       if (fcontext.getDebugInfo() != null) {   // if fcontext.debugInfo != null, it means rb.debug() == true
         FacetDebugInfo fdebug = new FacetDebugInfo();
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java b/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java
index 1d5a330..4365776 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetQuery.java
@@ -27,6 +27,7 @@ public class FacetQuery extends FacetRequest {
   // query string or query?
   Query q;
 
+  @SuppressWarnings("rawtypes")
   @Override
   public FacetProcessor createFacetProcessor(FacetContext fcontext) {
     return new FacetQueryProcessor(fcontext, this);
@@ -34,7 +35,7 @@ public class FacetQuery extends FacetRequest {
 
   @Override
   public FacetMerger createFacetMerger(Object prototype) {
-    return new FacetQueryMerger(this);
+    return new FacetModule.FacetQueryMerger(this);
   }
   
   @Override
@@ -49,7 +50,7 @@ public class FacetQuery extends FacetRequest {
 
 
 class FacetQueryProcessor extends FacetProcessor<FacetQuery> {
-  FacetQueryProcessor(FacetRequest.FacetContext fcontext, FacetQuery freq) {
+  FacetQueryProcessor(FacetContext fcontext, FacetQuery freq) {
     super(fcontext, freq);
   }
 
@@ -61,7 +62,7 @@ class FacetQueryProcessor extends FacetProcessor<FacetQuery> {
       // FIXME - what needs to be done here?
     }
     response = new SimpleOrderedMap<>();
-    fillBucket(response, freq.q, null, (fcontext.flags & FacetRequest.FacetContext.SKIP_FACET)!=0, fcontext.facetInfo);
+    fillBucket(response, freq.q, null, (fcontext.flags & FacetContext.SKIP_FACET)!=0, fcontext.facetInfo);
   }
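The FacetContext flags used above (IS_SHARD, IS_REFINEMENT, SKIP_FACET) are plain int bit flags: set with |=, tested with &, cleared with &= ~. A small sketch with illustrative values (the real constants live on FacetContext):

    public class FacetFlagsSketch {
      // Illustrative values only; not the actual FacetContext constants.
      static final int IS_SHARD      = 0x01;
      static final int IS_REFINEMENT = 0x02;
      static final int SKIP_FACET    = 0x04;

      public static void main(String[] args) {
        int flags = 0;
        flags |= IS_SHARD | SKIP_FACET;             // set bits
        boolean skip = (flags & SKIP_FACET) != 0;   // test a bit, as the fillBucket call above does
        flags &= ~SKIP_FACET;                       // clear a bit, as processSubs does for sub-facets
        System.out.println(skip + " " + ((flags & SKIP_FACET) != 0));  // true false
      }
    }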
 
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
index b02834c..a3b8949 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
@@ -16,38 +16,15 @@
  */
 package org.apache.solr.search.facet;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Date;
 import java.util.EnumSet;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 
-import org.apache.lucene.search.Query;
-import org.apache.lucene.util.NumericUtils;
-import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.FacetParams.FacetRangeInclude;
 import org.apache.solr.common.params.FacetParams.FacetRangeOther;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.schema.CurrencyFieldType;
-import org.apache.solr.schema.CurrencyValue;
-import org.apache.solr.schema.DateRangeField;
-import org.apache.solr.schema.ExchangeRateProvider;
-import org.apache.solr.schema.FieldType;
-import org.apache.solr.schema.SchemaField;
-import org.apache.solr.schema.TrieDateField;
-import org.apache.solr.schema.TrieField;
-import org.apache.solr.search.DocSet;
-import org.apache.solr.search.SyntaxError;
-import org.apache.solr.search.facet.SlotAcc.SlotContext;
-import org.apache.solr.util.DateMathParser;
-
-import static org.apache.solr.search.facet.FacetRequest.FacetContext.SKIP_FACET;
 
 public class FacetRange extends FacetRequestSorted {
   static final String ACTUAL_END_JSON_KEY = "_actual_end";
-  
   String field;
   Object start;
   Object end;
@@ -64,6 +41,7 @@ public class FacetRange extends FacetRequestSorted {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FacetProcessor createFacetProcessor(FacetContext fcontext) {
     return new FacetRangeProcessor(fcontext, this);
   }
@@ -72,7 +50,7 @@ public class FacetRange extends FacetRequestSorted {
   public FacetMerger createFacetMerger(Object prototype) {
     return new FacetRangeMerger(this);
   }
-  
+
   @Override
   public Map<String, Object> getFacetDescription() {
     Map<String, Object> descr = new HashMap<>();
@@ -86,1016 +64,4 @@ public class FacetRange extends FacetRequestSorted {
     }
     return descr;
   }
-  
-}
-
-
-class FacetRangeProcessor extends FacetProcessor<FacetRange> {
-  // TODO: the code paths for initial faceting, vs refinement, are very different...
-  // TODO: ...it might make sense to have seperate classes w/a common base?
-  // TODO: let FacetRange.createFacetProcessor decide which one to instantiate?
-  
-  final SchemaField sf;
-  final Calc calc;
-  final EnumSet<FacetRangeInclude> include;
-  final long effectiveMincount;
-  final Comparable start;
-  final Comparable end;
-  final String gap;
-  final Object ranges;
-
-  /** Built by {@link #createRangeList} if and only if needed for basic faceting */
-  List<Range> rangeList;
-  /** Built by {@link #createRangeList} if and only if needed for basic faceting */
-  List<Range> otherList;
-
-  /**
-   * Serves two purposes depending on the type of request.
-   * <ul>
-   * <li>If this is a phase#1 shard request, then {@link #createRangeList} will set this value (non null)
-   *     if and only if it is needed for refinement (ie: <code>hardend:false</code> &amp; <code>other</code>
-   *     that requires an end value low/high value calculation).  And it will be included in the response</li>
-   * <li>If this is a phase#2 refinement request, this variable will be used 
-   *     {@link #getOrComputeActualEndForRefinement} to track the value sent with the refinement request 
-   *     -- or to cache a recomputed value if the request omitted it -- for use in refining the 
-   *     <code>other</code> buckets that need them</li>
-   * </ul>
-   */
-  Comparable actual_end = null; // null until/unless we need it
-
-  FacetRangeProcessor(FacetRequest.FacetContext fcontext, FacetRange freq) {
-    super(fcontext, freq);
-    include = freq.include;
-    sf = fcontext.searcher.getSchema().getField(freq.field);
-    calc = getCalcForField(sf);
-    if (freq.ranges != null && (freq.start != null || freq.end != null || freq.gap != null)) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-          "Cannot set gap/start/end and ranges params together");
-    }
-    if (freq.ranges != null) {
-      ranges = freq.ranges;
-      start = null;
-      end = null;
-      gap = null;
-    } else {
-      start = calc.getValue(freq.start.toString());
-      end = calc.getValue(freq.end.toString());
-      gap = freq.gap.toString();
-      ranges = null;
-    }
-
-    // Under the normal mincount=0, each shard will need to return 0 counts since we don't calculate buckets at the top level.
-    // If mincount>0 then we could *potentially* set our sub mincount to 1...
-    // ...but that would require sorting the buckets (by their val) at the top level
-    //
-    // Rather than do that, which could be complicated by non-trivial field types, we'll force the sub-shard effectiveMincount
-    // to be 0, ensuring that we can trivially merge all the buckets from every shard
-    // (we have to filter the merged buckets by the original mincount either way)
-    effectiveMincount = fcontext.isShard() ? 0 : freq.mincount;
-  }
-
-  @Override
-  public void process() throws IOException {
-    super.process();
-
-    if (fcontext.facetInfo != null) { // refinement?
-      response = refineFacets();
-    } else {
-      // phase#1: build list of all buckets and return full facets...
-      createRangeList();
-      response = getRangeCountsIndexed();
-    }
-  }
-
-  private static class Range {
-    Object label;
-    Comparable low;
-    Comparable high;
-    boolean includeLower;
-    boolean includeUpper;
-
-    public Range(Object label, Comparable low, Comparable high, boolean includeLower, boolean includeUpper) {
-      this.label = label;
-      this.low = low;
-      this.high = high;
-      this.includeLower = includeLower;
-      this.includeUpper = includeUpper;
-    }
-  }
-
-  /**
-   * Returns a {@link Calc} instance to use for <em>term</em> faceting over a numeric field.
-   * This method is unused for <code>range</code> faceting, and exists solely as a helper method for other classes
-   * 
-   * @param sf A field to facet on, must be of a type such that {@link FieldType#getNumberType} is non null
-   * @return a <code>Calc</code> instance with {@link Calc#bitsToValue} and {@link Calc#bitsToSortableBits} methods suitable for the specified field.
-   * @see FacetFieldProcessorByHashDV
-   */
-  public static Calc getNumericCalc(SchemaField sf) {
-    Calc calc;
-    final FieldType ft = sf.getType();
-
-    if (ft instanceof TrieField || ft.isPointField()) {
-      switch (ft.getNumberType()) {
-        case FLOAT:
-          calc = new FloatCalc(sf);
-          break;
-        case DOUBLE:
-          calc = new DoubleCalc(sf);
-          break;
-        case INTEGER:
-          calc = new IntCalc(sf);
-          break;
-        case LONG:
-          calc = new LongCalc(sf);
-          break;
-        case DATE:
-          calc = new DateCalc(sf, null);
-          break;
-        default:
-          throw new SolrException
-              (SolrException.ErrorCode.BAD_REQUEST,
-                  "Expected numeric field type :" + sf);
-      }
-    } else {
-      throw new SolrException
-          (SolrException.ErrorCode.BAD_REQUEST,
-              "Expected numeric field type :" + sf);
-    }
-    return calc;
-  }
-
-  /**
-   * Helper method used in processor constructor
-   * @return a <code>Calc</code> instance with {@link Calc#bitsToValue} and {@link Calc#bitsToSortableBits} methods suitable for the specified field.
-   */
-  private static Calc getCalcForField(SchemaField sf) {
-    final FieldType ft = sf.getType();
-    if (ft instanceof TrieField || ft.isPointField()) {
-      switch (ft.getNumberType()) {
-        case FLOAT:
-          return new FloatCalc(sf);
-        case DOUBLE:
-          return new DoubleCalc(sf);
-        case INTEGER:
-          return new IntCalc(sf);
-        case LONG:
-          return new LongCalc(sf);
-        case DATE:
-          return new DateCalc(sf, null);
-        default:
-          throw new SolrException
-              (SolrException.ErrorCode.BAD_REQUEST,
-                  "Unable to range facet on numeric field of unexpected type:" + sf.getName());
-      }
-    } else if (ft instanceof CurrencyFieldType) {
-      return new CurrencyCalc(sf);
-    } else if (ft instanceof DateRangeField) {
-      return new DateCalc(sf, null);
-    }
-
-    // if we made it this far, we have no idea what it is...
-    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-                            "Unable to range facet on field:" + sf.getName());
-  }
-
-  private void createRangeList() throws IOException {
-
-    rangeList = new ArrayList<>();
-    otherList = new ArrayList<>(3);
-
-    Comparable low = start;
-    Comparable loop_end = this.end;
-
-    if (ranges != null) {
-      rangeList.addAll(parseRanges(ranges));
-      return;
-    }
-
-    while (low.compareTo(end) < 0) {
-      Comparable high = calc.addGap(low, gap);
-      if (end.compareTo(high) < 0) {
-        if (freq.hardend) {
-          high = loop_end;
-        } else {
-          loop_end = high;
-        }
-      }
-      if (high.compareTo(low) < 0) {
-        throw new SolrException
-            (SolrException.ErrorCode.BAD_REQUEST,
-                "range facet infinite loop (is gap negative? did the math overflow?)");
-      }
-      if (high.compareTo(low) == 0) {
-        throw new SolrException
-            (SolrException.ErrorCode.BAD_REQUEST,
-                "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high);
-      }
-
-      boolean incLower = (include.contains(FacetRangeInclude.LOWER) ||
-          (include.contains(FacetRangeInclude.EDGE) && 0 == low.compareTo(start)));
-      boolean incUpper = (include.contains(FacetRangeInclude.UPPER) ||
-          (include.contains(FacetRangeInclude.EDGE) && 0 == high.compareTo(end)));
-
-      Range range = new Range(calc.buildRangeLabel(low), low, high, incLower, incUpper);
-      rangeList.add( range );
-
-      low = high;
-    }
-
-    // no matter what other values are listed, we don't do
-    // anything if "none" is specified.
-    if (! freq.others.contains(FacetRangeOther.NONE) ) {
-      final boolean all = freq.others.contains(FacetRangeOther.ALL);
-
-      if (all || freq.others.contains(FacetRangeOther.BEFORE)) {
-        otherList.add( buildBeforeRange() );
-      }
-      if (all || freq.others.contains(FacetRangeOther.AFTER)) {
-        actual_end = loop_end;
-        otherList.add( buildAfterRange() );
-      }
-      if (all || freq.others.contains(FacetRangeOther.BETWEEN)) {
-        actual_end = loop_end;
-        otherList.add( buildBetweenRange() );
-      }
-    }
-    // if we're not a shard request, or this is a hardend:true situation, then actual_end isn't needed
-    if (freq.hardend || (! fcontext.isShard())) {
-      actual_end = null;
-    }
-  }
-
-  /**
-   * Parses the given list of maps and returns list of Ranges
-   *
-   * @param input - list of map containing the ranges
-   * @return list of {@link Range}
-   */
-  private List<Range> parseRanges(Object input) {
-    if (!(input instanceof List)) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-          "Expected List for ranges but got " + input.getClass().getSimpleName() + " = " + input
-      );
-    }
-    List intervals = (List) input;
-    List<Range> ranges = new ArrayList<>();
-    for (Object obj : intervals) {
-      if (!(obj instanceof Map)) {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-            "Expected Map for range but got " + obj.getClass().getSimpleName() + " = " + obj);
-      }
-      Range range;
-      Map<String, Object> interval = (Map<String, Object>) obj;
-      if (interval.containsKey("range")) {
-        range = getRangeByOldFormat(interval);
-      } else {
-        range = getRangeByNewFormat(interval);
-      }
-      ranges.add(range);
-    }
-    return ranges;
-  }
-
-  private boolean getBoolean(Map<String,Object> args, String paramName, boolean defVal) {
-    Object o = args.get(paramName);
-    if (o == null) {
-      return defVal;
-    }
-    // TODO: should we be more flexible and accept things like "true" (strings)?
-    // Perhaps wait until the use case comes up.
-    if (!(o instanceof Boolean)) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-          "Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
-    }
-
-    return (Boolean)o;
-  }
-
-  private String getString(Map<String,Object> args, String paramName, boolean required) {
-    Object o = args.get(paramName);
-    if (o == null) {
-      if (required) {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-            "Missing required parameter '" + paramName + "' for " + args);
-      }
-      return null;
-    }
-    if (!(o instanceof String)) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-          "Expected string type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
-    }
-
-    return (String)o;
-  }
-
-  /**
-   * Parses the range given in format {from:val1, to:val2, inclusive_to:true}
-   * and returns the {@link Range}
-   *
-   * @param rangeMap Map containing the range info
-   * @return {@link Range}
-   */
-  private Range getRangeByNewFormat(Map<String, Object> rangeMap) {
-    Object fromObj = rangeMap.get("from");
-    Object toObj = rangeMap.get("to");
-
-    String fromStr = fromObj == null? "*" : fromObj.toString();
-    String toStr = toObj == null? "*": toObj.toString();
-    boolean includeUpper = getBoolean(rangeMap, "inclusive_to", false);
-    boolean includeLower = getBoolean(rangeMap, "inclusive_from", true);
-
-    Object key = rangeMap.get("key");
-    // if (key == null) {
-    //  key = (includeLower? "[": "(") + fromStr + "," + toStr + (includeUpper? "]": ")");
-    // }
-    // using the default key as custom key won't work with refine
-    // refine would need both low and high values
-    key = (includeLower? "[": "(") + fromStr + "," + toStr + (includeUpper? "]": ")");
-
-    Comparable from = getComparableFromString(fromStr);
-    Comparable to = getComparableFromString(toStr);
-    if (from != null && to != null && from.compareTo(to) > 0) {
-      // allowing from and to be same
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'from' is higher than 'to' in range for key: " + key);
-    }
-
-    return new Range(key, from, to, includeLower, includeUpper);
-  }
-
-  /**
-   * Parses the range string from the map and Returns {@link Range}
-   *
-   * @param range map containing the interval
-   * @return {@link Range}
-   */
-  private Range getRangeByOldFormat(Map<String, Object> range) {
-    String key = getString(range, "key", false);
-    String rangeStr = getString(range, "range", true);
-    try {
-      return parseRangeFromString(key, rangeStr);
-    } catch (SyntaxError e) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
-    }
-  }
-
-  /**
-   * Parses the given string and returns Range.
-   * This is adopted from {@link org.apache.solr.request.IntervalFacets}
-   *
-   * @param key The name of range which would be used as {@link Range}'s label
-   * @param rangeStr The string containing the Range
-   * @return {@link Range}
-   */
-  private Range parseRangeFromString(String key, String rangeStr) throws SyntaxError {
-    rangeStr = rangeStr.trim();
-    if (rangeStr.isEmpty()) {
-      throw new SyntaxError("empty facet range");
-    }
-
-    boolean includeLower = true, includeUpper = true;
-    Comparable start = null, end = null;
-    if (rangeStr.charAt(0) == '(') {
-      includeLower = false;
-    } else if (rangeStr.charAt(0) != '[') {
-      throw new SyntaxError( "Invalid start character " + rangeStr.charAt(0) + " in facet range " + rangeStr);
-    }
-
-    final int lastNdx = rangeStr.length() - 1;
-    if (rangeStr.charAt(lastNdx) == ')') {
-      includeUpper = false;
-    } else if (rangeStr.charAt(lastNdx) != ']') {
-      throw new SyntaxError("Invalid end character " + rangeStr.charAt(lastNdx) + " in facet range " + rangeStr);
-    }
-
-    StringBuilder startStr = new StringBuilder(lastNdx);
-    int i = unescape(rangeStr, 1, lastNdx, startStr);
-    if (i == lastNdx) {
-      if (rangeStr.charAt(lastNdx - 1) == ',') {
-        throw new SyntaxError("Empty range limit");
-      }
-      throw new SyntaxError("Missing unescaped comma separating range ends in " + rangeStr);
-    }
-    start = getComparableFromString(startStr.toString());
-
-    StringBuilder endStr = new StringBuilder(lastNdx);
-    i = unescape(rangeStr, i, lastNdx, endStr);
-    if (i != lastNdx) {
-      throw new SyntaxError("Extra unescaped comma at index " + i + " in range " + rangeStr);
-    }
-    end = getComparableFromString(endStr.toString());
-
-    if (start != null && end != null && start.compareTo(end) > 0) {
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'start' is higher than 'end' in range for key: " + rangeStr);
-    }
-
-    // not using custom key as it won't work with refine
-    // refine would need both low and high values
-    return new Range(rangeStr, start, end, includeLower, includeUpper);
-  }
-
-  /* Fill in sb with a string from i to the first unescaped comma, or n.
-      Return the index past the unescaped comma, or n if no unescaped comma exists */
-  private int unescape(String s, int i, int n, StringBuilder sb) throws SyntaxError {
-    for (; i < n; ++i) {
-      char c = s.charAt(i);
-      if (c == '\\') {
-        ++i;
-        if (i < n) {
-          c = s.charAt(i);
-        } else {
-          throw new SyntaxError("Unfinished escape at index " + i + " in facet range " + s);
-        }
-      } else if (c == ',') {
-        return i + 1;
-      }
-      sb.append(c);
-    }
-    return n;
-  }
-
-  private Comparable getComparableFromString(String value) {
-    value = value.trim();
-    if ("*".equals(value)) {
-      return null;
-    }
-    return calc.getValue(value);
-  }
-
-  private  SimpleOrderedMap getRangeCountsIndexed() throws IOException {
-
-    int slotCount = rangeList.size() + otherList.size();
-    intersections = new DocSet[slotCount];
-    filters = new Query[slotCount];
-
-
-    createAccs(fcontext.base.size(), slotCount);
-
-    for (int idx = 0; idx<rangeList.size(); idx++) {
-      rangeStats(rangeList.get(idx), idx);
-    }
-
-    for (int idx = 0; idx<otherList.size(); idx++) {
-      rangeStats(otherList.get(idx), rangeList.size() + idx);
-    }
-
-
-    final SimpleOrderedMap res = new SimpleOrderedMap<>();
-    List<SimpleOrderedMap> buckets = new ArrayList<>();
-    res.add("buckets", buckets);
-
-    for (int idx = 0; idx<rangeList.size(); idx++) {
-      if (effectiveMincount > 0 && countAcc.getCount(idx) < effectiveMincount) continue;
-      Range range = rangeList.get(idx);
-      SimpleOrderedMap bucket = new SimpleOrderedMap();
-      buckets.add(bucket);
-      bucket.add("val", range.label);
-      addStats(bucket, idx);
-      doSubs(bucket, idx);
-    }
-
-    for (int idx = 0; idx<otherList.size(); idx++) {
-      // we don't skip these buckets based on mincount
-      Range range = otherList.get(idx);
-      SimpleOrderedMap bucket = new SimpleOrderedMap();
-      res.add(range.label.toString(), bucket);
-      addStats(bucket, rangeList.size() + idx);
-      doSubs(bucket, rangeList.size() + idx);
-    }
-
-    if (null != actual_end) {
-      res.add(FacetRange.ACTUAL_END_JSON_KEY, calc.formatValue(actual_end));
-    }
-
-    return res;
-  }
-
-  private Query[] filters;
-  private DocSet[] intersections;
-  private void rangeStats(Range range, int slot) throws IOException {
-    Query rangeQ = sf.getType().getRangeQuery(null, sf, range.low == null ? null : calc.formatValue(range.low), range.high==null ? null : calc.formatValue(range.high), range.includeLower, range.includeUpper);
-    // TODO: specialize count only
-    DocSet intersection = fcontext.searcher.getDocSet(rangeQ, fcontext.base);
-    filters[slot] = rangeQ;
-    intersections[slot] = intersection;  // save for later  // TODO: only save if number of slots is small enough?
-    long num = collect(intersection, slot, slotNum -> { return new SlotContext(rangeQ); });
-    countAcc.incrementCount(slot, num); // TODO: roll this into collect()
-  }
-
-  private void doSubs(SimpleOrderedMap bucket, int slot) throws IOException {
-    // handle sub-facets for this bucket
-    if (freq.getSubFacets().size() > 0) {
-      DocSet subBase = intersections[slot];
-      try {
-        processSubs(bucket, filters[slot], subBase, false, null);
-      } finally {
-        // subContext.base.decref();  // OFF-HEAP
-        // subContext.base = null;  // do not modify context after creation... there may be deferred execution (i.e. streaming)
-      }
-    }
-  }
-
-  // Essentially copied from SimpleFacets...
-  // would be nice to unify this stuff w/ analytics component...
-  /**
-   * Perhaps someday instead of having a giant "instanceof" case
-   * statement to pick an impl, we can add a "RangeFacetable" marker
-   * interface to FieldTypes and they can return instances of these
-   * directly from some method -- but until then, keep this locked down
-   * and private.
-   */
-  static abstract class Calc {
-    protected final SchemaField field;
-    public Calc(final SchemaField field) {
-      this.field = field;
-    }
-
-    /**
-     * Used by {@link FacetFieldProcessorByHashDV} for field faceting on numeric types -- not used for <code>range</code> faceting
-     */
-    public Comparable bitsToValue(long bits) {
-      return bits;
-    }
-
-    /**
-     * Used by {@link FacetFieldProcessorByHashDV} for field faceting on numeric types -- not used for <code>range</code> faceting
-     */
-    public long bitsToSortableBits(long bits) {
-      return bits;
-    }
-
-    /**
-     * Given the low value for a bucket, generates the appropriate "label" object to use.
-     * By default return the low object unmodified.
-     */
-    public Object buildRangeLabel(Comparable low) {
-      return low;
-    }
-    
-    /**
-     * Formats a value into a label used in a response
-     * Default Impl just uses toString()
-     */
-    public String formatValue(final Comparable val) {
-      return val.toString();
-    }
-
-    /**
-     * Parses a String param into a value throwing
-     * an exception if not possible
-     */
-    public final Comparable getValue(final String rawval) {
-      try {
-        return parseStr(rawval);
-      } catch (Exception e) {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-            "Can't parse value "+rawval+" for field: " +
-                field.getName(), e);
-      }
-    }
-
-    /**
-     * Parses a String param into a value.
-     * Can throw a low level format exception as needed.
-     */
-    protected abstract Comparable parseStr(final String rawval)
-        throws java.text.ParseException;
-
-    /**
-     * Parses a String param into a value that represents the gap and
-     * can be included in the response, throwing
-     * a useful exception if not possible.
-     *
-     * Note: uses Object as the return type instead of T for things like
-     * Date where gap is just a DateMathParser string
-     */
-    public final Object getGap(final String gap) {
-      try {
-        return parseGap(gap);
-      } catch (Exception e) {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-            "Can't parse gap "+gap+" for field: " +
-                field.getName(), e);
-      }
-    }
-
-    /**
-     * Parses a String param into a value that represents the gap and
-     * can be included in the response.
-     * Can throw a low level format exception as needed.
-     *
-     * Default Impl calls parseVal
-     */
-    protected Object parseGap(final String rawval) throws java.text.ParseException {
-      return parseStr(rawval);
-    }
-
-    /**
-     * Adds the String gap param to a low Range endpoint value to determine
-     * the corresponding high Range endpoint value, throwing
-     * a useful exception if not possible.
-     */
-    public final Comparable addGap(Comparable value, String gap) {
-      try {
-        return parseAndAddGap(value, gap);
-      } catch (Exception e) {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-            "Can't add gap "+gap+" to value " + value +
-                " for field: " + field.getName(), e);
-      }
-    }
-    /**
-     * Adds the String gap param to a low Range endpoint value to determine
-     * the corresponding high Range endpoint value.
-     * Can throw a low level format exception as needed.
-     */
-    protected abstract Comparable parseAndAddGap(Comparable value, String gap)
-        throws java.text.ParseException;
-
-  }
-
-  private static class FloatCalc extends Calc {
-
-    @Override
-    public Comparable bitsToValue(long bits) {
-      if (field.getType().isPointField() && field.multiValued()) {
-        return NumericUtils.sortableIntToFloat((int)bits);
-      } else {
-        return Float.intBitsToFloat( (int)bits );
-      }
-    }
-
-    @Override
-    public long bitsToSortableBits(long bits) {
-      return NumericUtils.sortableDoubleBits(bits);
-    }
-
-    public FloatCalc(final SchemaField f) { super(f); }
-    @Override
-    protected Float parseStr(String rawval) {
-      return Float.valueOf(rawval);
-    }
-    @Override
-    public Float parseAndAddGap(Comparable value, String gap) {
-      return ((Number) value).floatValue() + Float.parseFloat(gap);
-    }
-  }
-
-  private static class DoubleCalc extends Calc {
-    @Override
-    public Comparable bitsToValue(long bits) {
-      if (field.getType().isPointField() && field.multiValued()) {
-        return NumericUtils.sortableLongToDouble(bits);
-      } else {
-        return Double.longBitsToDouble(bits);
-      }
-    }
-
-    @Override
-    public long bitsToSortableBits(long bits) {
-      return NumericUtils.sortableDoubleBits(bits);
-    }
-
-    public DoubleCalc(final SchemaField f) { super(f); }
-    @Override
-    protected Double parseStr(String rawval) {
-      return Double.valueOf(rawval);
-    }
-    @Override
-    public Double parseAndAddGap(Comparable value, String gap) {
-      return ((Number) value).doubleValue() + Double.parseDouble(gap);
-    }
-  }
-
-  private static class IntCalc extends Calc {
-
-    public IntCalc(final SchemaField f) { super(f); }
-    @Override
-    public Comparable bitsToValue(long bits) {
-      return (int)bits;
-    }
-    @Override
-    protected Integer parseStr(String rawval) {
-      return Integer.valueOf(rawval);
-    }
-    @Override
-    public Integer parseAndAddGap(Comparable value, String gap) {
-      return ((Number) value).intValue() + Integer.parseInt(gap);
-    }
-  }
-
-  private static class LongCalc extends Calc {
-
-    public LongCalc(final SchemaField f) { super(f); }
-    @Override
-    protected Long parseStr(String rawval) {
-      return Long.valueOf(rawval);
-    }
-    @Override
-    public Long parseAndAddGap(Comparable value, String gap) {
-      return ((Number) value).longValue() + Long.parseLong(gap);
-    }
-  }
-
-  private static class DateCalc extends Calc {
-    private final Date now;
-    public DateCalc(final SchemaField f,
-                    final Date now) {
-      super(f);
-      this.now = now;
-      if (!(field.getType() instanceof TrieDateField || field.getType().isPointField() ||
-          field.getType() instanceof DateRangeField)) {
-        throw new IllegalArgumentException("SchemaField must use field type extending TrieDateField, DateRangeField or PointField");
-      }
-    }
-
-    @Override
-    public Comparable bitsToValue(long bits) {
-      return new Date(bits);
-    }
-
-    @Override
-    public String formatValue(Comparable val) {
-      return ((Date)val).toInstant().toString();
-    }
-    @Override
-    protected Date parseStr(String rawval) {
-      return DateMathParser.parseMath(now, rawval);
-    }
-    @Override
-    protected Object parseGap(final String rawval) {
-      return rawval;
-    }
-    @Override
-    public Date parseAndAddGap(Comparable value, String gap) throws java.text.ParseException {
-      final DateMathParser dmp = new DateMathParser();
-      dmp.setNow((Date)value);
-      return dmp.parseMath(gap);
-    }
-  }
-
-  private static class CurrencyCalc extends Calc {
-    private String defaultCurrencyCode;
-    private ExchangeRateProvider exchangeRateProvider;
-    public CurrencyCalc(final SchemaField field) {
-      super(field);
-      if(!(this.field.getType() instanceof CurrencyFieldType)) {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-                                "Cannot perform range faceting over non CurrencyField fields");
-      }
-      defaultCurrencyCode =
-        ((CurrencyFieldType)this.field.getType()).getDefaultCurrency();
-      exchangeRateProvider =
-        ((CurrencyFieldType)this.field.getType()).getProvider();
-    }
-
-    /** 
-     * Throws a Server Error that this type of operation is not supported for this field 
-     * {@inheritDoc} 
-     */
-    @Override
-    public Comparable bitsToValue(long bits) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                              "Currency Field " + field.getName() + " can not be used in this way");
-    }
-
-    /** 
-     * Throws a Server Error that this type of operation is not supported for this field 
-     * {@inheritDoc} 
-     */
-    @Override
-    public long bitsToSortableBits(long bits) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                              "Currency Field " + field.getName() + " can not be used in this way");
-    }
-
-    /**
-     * Returns the short string representation of the CurrencyValue
-     * @see CurrencyValue#strValue
-     */
-    @Override
-    public Object buildRangeLabel(Comparable low) {
-      return ((CurrencyValue)low).strValue();
-    }
-    
-    @Override
-    public String formatValue(Comparable val) {
-      return ((CurrencyValue)val).strValue();
-    }
-
-    @Override
-    protected Comparable parseStr(final String rawval) throws java.text.ParseException {
-      return CurrencyValue.parse(rawval, defaultCurrencyCode);
-    }
-
-    @Override
-    protected Object parseGap(final String rawval) throws java.text.ParseException {
-      return parseStr(rawval);
-    }
-
-    @Override
-    protected Comparable parseAndAddGap(Comparable value, String gap) throws java.text.ParseException{
-      if (value == null) {
-        throw new NullPointerException("Cannot perform range faceting on null CurrencyValue");
-      }
-      CurrencyValue val = (CurrencyValue) value;
-      CurrencyValue gapCurrencyValue =
-        CurrencyValue.parse(gap, defaultCurrencyCode);
-      long gapAmount =
-        CurrencyValue.convertAmount(this.exchangeRateProvider,
-                                    gapCurrencyValue.getCurrencyCode(),
-                                    gapCurrencyValue.getAmount(),
-                                    val.getCurrencyCode());
-      return new CurrencyValue(val.getAmount() + gapAmount,
-                               val.getCurrencyCode());
-
-    }
-
-  }
-
-  protected SimpleOrderedMap<Object> refineFacets() throws IOException {
-    // this refineFacets method is patterned after FacetFieldProcessor.refineFacets such that
-    // the same "_s" skip bucket syntax is used and FacetRangeMerger can subclass FacetRequestSortedMerger
-    // for dealing with them & the refinement requests.
-    // 
-    // But range faceting does *NOT* use the "leaves" and "partial" syntax
-    // 
-  // If/When range facet becomes more like field facet in its ability to sort and limit the "range buckets"
-    // FacetRangeProcessor and FacetFieldProcessor should probably be refactored to share more code.
-    
-    boolean skipThisFacet = (fcontext.flags & SKIP_FACET) != 0;
-
-    List<List> skip = FacetFieldProcessor.asList(fcontext.facetInfo.get("_s"));    // We have seen this bucket, so skip stats on it, and skip sub-facets except for the specified sub-facets that should calculate specified buckets.
-
-    // sanity check our merger's super class didn't send us something we can't handle ...
-    assert 0 == FacetFieldProcessor.asList(fcontext.facetInfo.get("_l")).size();
-    assert 0 == FacetFieldProcessor.asList(fcontext.facetInfo.get("_p")).size();
-
-    SimpleOrderedMap<Object> res = new SimpleOrderedMap<>();
-    List<SimpleOrderedMap> bucketList = new ArrayList<>( skip.size() );
-    res.add("buckets", bucketList);
-
-    // TODO: an alternate implementation can fill all accs at once
-    createAccs(-1, 1);
-
-    for (List bucketAndFacetInfo : skip) {
-      assert bucketAndFacetInfo.size() == 2;
-      Object bucketVal = bucketAndFacetInfo.get(0);
-      Map<String,Object> facetInfo = (Map<String, Object>) bucketAndFacetInfo.get(1);
-
-      bucketList.add( refineBucket(bucketVal, true, facetInfo ) );
-    }
-
-    { // refine the special "other" buckets
-      
-      // NOTE: we're re-using this variable for each special we look for...
-      Map<String,Object> specialFacetInfo;
-
-      specialFacetInfo = (Map<String, Object>) fcontext.facetInfo.get(FacetRangeOther.BEFORE.toString());
-      if (null != specialFacetInfo) {
-        res.add(FacetRangeOther.BEFORE.toString(),
-                refineRange(buildBeforeRange(), skipThisFacet, specialFacetInfo));
-      }
-      
-      specialFacetInfo = (Map<String, Object>) fcontext.facetInfo.get(FacetRangeOther.AFTER.toString());
-      if (null != specialFacetInfo) {
-        res.add(FacetRangeOther.AFTER.toString(),
-                refineRange(buildAfterRange(), skipThisFacet, specialFacetInfo));
-      }
-      
-      specialFacetInfo = (Map<String, Object>) fcontext.facetInfo.get(FacetRangeOther.BETWEEN.toString());
-      if (null != specialFacetInfo) {
-        res.add(FacetRangeOther.BETWEEN.toString(),
-                refineRange(buildBetweenRange(), skipThisFacet, specialFacetInfo));
-      }
-    }
-      
-    return res;
-  }
-
-  /** 
-   * Returns the "Actual End" value sent from the merge as part of the refinement request (if any) 
-   * or re-computes it as needed using the Calc and caches the result for re-use
-   */
-  private Comparable getOrComputeActualEndForRefinement() {
-    if (null != actual_end) {
-      return actual_end;
-    }
-    
-    if (freq.hardend) {
-      actual_end = this.end;
-    } else if (fcontext.facetInfo.containsKey(FacetRange.ACTUAL_END_JSON_KEY)) {
-      actual_end = calc.getValue(fcontext.facetInfo.get(FacetRange.ACTUAL_END_JSON_KEY).toString());
-    } else {
-      // a quick and dirty loop over the ranges (we don't need) to compute the actual_end...
-      Comparable low = start;
-      while (low.compareTo(end) < 0) {
-        Comparable high = calc.addGap(low, gap);
-        if (end.compareTo(high) < 0) {
-          actual_end = high;
-          break;
-        }
-        if (high.compareTo(low) <= 0) {
-          throw new SolrException
-            (SolrException.ErrorCode.BAD_REQUEST,
-             "Garbage input for facet refinement w/o " + FacetRange.ACTUAL_END_JSON_KEY);
-        }
-        low = high;
-      }
-    }
-    
-    assert null != actual_end;
-    return actual_end;
-  }
-  
-  private SimpleOrderedMap<Object> refineBucket(Object bucketVal, boolean skip, Map<String,Object> facetInfo) throws IOException {
-
-    String val = bucketVal.toString();
-    if (ranges != null) {
-      try {
-        Range range = parseRangeFromString(val, val);
-        final SimpleOrderedMap<Object> bucket = refineRange(range, skip, facetInfo);
-        bucket.add("val", range.label);
-        return bucket;
-      } catch (SyntaxError e) {
-        // execution won't reach here as ranges are already validated
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
-      }
-    }
-
-    Comparable low = calc.getValue(val);
-    Comparable high = calc.addGap(low, gap);
-    Comparable max_end = end;
-    if (end.compareTo(high) < 0) {
-      if (freq.hardend) {
-        high = max_end;
-      } else {
-        max_end = high;
-      }
-    }
-    if (high.compareTo(low) < 0) {
-      throw new SolrException
-          (SolrException.ErrorCode.BAD_REQUEST,
-              "range facet infinite loop (is gap negative? did the math overflow?)");
-    }
-    if (high.compareTo(low) == 0) {
-      throw new SolrException
-          (SolrException.ErrorCode.BAD_REQUEST,
-              "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high );
-    }
-
-    boolean incLower = (include.contains(FacetRangeInclude.LOWER) ||
-                        (include.contains(FacetRangeInclude.EDGE) && 0 == low.compareTo(start)));
-    boolean incUpper = (include.contains(FacetRangeInclude.UPPER) ||
-                        (include.contains(FacetRangeInclude.EDGE) && 0 == high.compareTo(max_end)));
-
-    Range range = new Range(calc.buildRangeLabel(low), low, high, incLower, incUpper);
-
-    // now refine this range
-
-    final SimpleOrderedMap<Object> bucket = refineRange(range, skip, facetInfo);
-    bucket.add("val", range.label);
-
-    return bucket;
-  }
-
-  /** Helper method for refining a Range
-   * @see #fillBucket
-   */
-  private SimpleOrderedMap<Object> refineRange(Range range, boolean skip, Map<String,Object> facetInfo) throws IOException {
-    final SimpleOrderedMap<Object> bucket = new SimpleOrderedMap<>();
-    final Query domainQ = sf.getType().getRangeQuery(null, sf, range.low == null ? null : calc.formatValue(range.low), range.high==null ? null : calc.formatValue(range.high), range.includeLower, range.includeUpper);
-    fillBucket(bucket, domainQ, null, skip, facetInfo);
-    return bucket;
-  }
-  
-  /** Helper method for building a "before" Range */
-  private Range buildBeforeRange() {
-    // include upper bound if "outer" or if first gap doesn't already include it
-    final boolean incUpper = (include.contains(FacetRangeInclude.OUTER) ||
-                              (!(include.contains(FacetRangeInclude.LOWER) ||
-                                 include.contains(FacetRangeInclude.EDGE))));
-    return new Range(FacetRangeOther.BEFORE.toString(), null, start, false, incUpper);
-  }
-
-  /** Helper method for building a "after" Range */
-  private Range buildAfterRange() {
-    final Comparable the_end = getOrComputeActualEndForRefinement();
-    assert null != the_end;
-    final boolean incLower = (include.contains(FacetRangeInclude.OUTER) ||
-                              (!(include.contains(FacetRangeInclude.UPPER) ||
-                                 include.contains(FacetRangeInclude.EDGE))));
-    return new Range(FacetRangeOther.AFTER.toString(), the_end, null, incLower, false);
-  }
-
-  /** Helper method for building a "between" Range */
-  private Range buildBetweenRange() {
-    final Comparable the_end = getOrComputeActualEndForRefinement();
-    assert null != the_end;
-    final boolean incLower = (include.contains(FacetRangeInclude.LOWER) ||
-                              include.contains(FacetRangeInclude.EDGE));
-    final boolean incUpper = (include.contains(FacetRangeInclude.UPPER) ||
-                              include.contains(FacetRangeInclude.EDGE));
-    return new Range(FacetRangeOther.BETWEEN.toString(), start, the_end, incLower, incUpper);
-  }
 }
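
The bucket-edge loop removed above (and carried over into the new FacetRangeProcessor.java further down) is the core of range faceting: it walks from start to end in calc.addGap() steps, where the gap is plain arithmetic for numeric fields and a date-math string such as "+1MONTH" for date fields. A simplified, standalone sketch of that loop, assuming plain int arithmetic rather than the Calc abstraction, shows how hardend interacts with the final bucket: with start=0, end=10, gap=4 it prints [0,4) [4,8) [8,12) when hardend=false (the stretched end, 12, is the value actual_end can report back to the merger under "_actual_end"), and [0,4) [4,8) [8,10) when hardend=true.

    // Hypothetical standalone sketch, not Solr code: integer ranges only,
    // the include flags and the before/after/between buckets are omitted.
    public class RangeBucketSketch {
      public static void main(String[] args) {
        boolean hardend = args.length > 0 && Boolean.parseBoolean(args[0]);
        int start = 0, end = 10, gap = 4;
        int low = start;
        int loopEnd = end;                  // plays the role of actual_end
        while (low < end) {
          int high = low + gap;             // calc.addGap(low, gap)
          if (end < high) {                 // the last gap overshoots the requested end
            if (hardend) {
              high = loopEnd;               // clip the final bucket back to end
            } else {
              loopEnd = high;               // stretch the effective end instead
            }
          }
          System.out.println("[" + low + "," + high + ")");
          low = high;
        }
      }
    }

Refinement requests rely on the same arithmetic: refineBucket() re-derives each bucket from its low value, and getOrComputeActualEndForRefinement() either reuses the "_actual_end" value echoed back by the merger or recomputes it with the loop above.
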
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRangeMerger.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeMerger.java
index aa7112e..5aaafa1 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRangeMerger.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeMerger.java
@@ -89,7 +89,7 @@ public class FacetRangeMerger extends FacetRequestSortedMerger<FacetRange> {
     return refinement;
   }
   
-  public void merge(SimpleOrderedMap facetResult, Context mcontext) {
+  public void merge(@SuppressWarnings("rawtypes") SimpleOrderedMap facetResult, Context mcontext) {
     boolean all = freq.others.contains(FacetParams.FacetRangeOther.ALL);
 
     if (all || freq.others.contains(FacetParams.FacetRangeOther.BEFORE)) {
@@ -131,12 +131,14 @@ public class FacetRangeMerger extends FacetRequestSortedMerger<FacetRange> {
       }
     }
 
+    @SuppressWarnings({"unchecked", "rawtypes"})
     List<SimpleOrderedMap> bucketList = (List<SimpleOrderedMap>) facetResult.get("buckets");
     mergeBucketList(bucketList , mcontext);
   }
 
 
   @Override
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public Object getMergedResult() {
     // TODO: use sortedBuckets
     SimpleOrderedMap result = new SimpleOrderedMap(4);
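
The FacetRangeMerger hunks above only add @SuppressWarnings annotations: SimpleOrderedMap is handled there as a raw type, so recovering the bucket list involves an unchecked cast the compiler cannot verify. A minimal sketch of that pattern (the helper name is hypothetical, not part of the patch):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.solr.common.util.SimpleOrderedMap;

    class RawMapCastSketch {
      // Mirrors the cast FacetRangeMerger.merge() performs on facetResult.get("buckets").
      @SuppressWarnings({"unchecked", "rawtypes"})
      static List<SimpleOrderedMap> bucketsOf(SimpleOrderedMap facetResult) {
        // The value was stored as a plain Object; its element type is known only by convention.
        return (List<SimpleOrderedMap>) facetResult.get("buckets");
      }

      public static void main(String[] args) {
        SimpleOrderedMap<Object> facetResult = new SimpleOrderedMap<>();
        facetResult.add("buckets", new ArrayList<SimpleOrderedMap<Object>>());
        System.out.println(bucketsOf(facetResult).size());   // prints 0
      }
    }

Keeping the annotation scoped to a single method or cast site leaves the rest of the class eligible for genuine warnings, which appears to be the pattern followed throughout this commit.
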
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRangeParser.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeParser.java
new file mode 100644
index 0000000..4098450
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeParser.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search.facet;
+
+import org.apache.solr.common.params.FacetParams;
+import org.apache.solr.search.SyntaxError;
+
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Map;
+
+class FacetRangeParser extends FacetParser<FacetRange> {
+  @SuppressWarnings({"rawtypes"})
+  public FacetRangeParser(FacetParser parent, String key) {
+    super(parent, key);
+    facet = new FacetRange();
+  }
+
+  public FacetRange parse(Object arg) throws SyntaxError {
+    parseCommonParams(arg);
+
+    if (!(arg instanceof Map)) {
+      throw err("Missing range facet arguments");
+    }
+
+    @SuppressWarnings({"unchecked"})
+    Map<String, Object> m = (Map<String, Object>) arg;
+
+    facet.field = getString(m, "field", null);
+    facet.ranges = getVal(m, "ranges", false);
+
+    boolean required = facet.ranges == null;
+    facet.start = getVal(m, "start", required);
+    facet.end = getVal(m, "end", required);
+    facet.gap = getVal(m, "gap", required);
+    facet.hardend = getBoolean(m, "hardend", facet.hardend);
+    facet.mincount = getLong(m, "mincount", 0);
+
+    // TODO: refactor list-of-options code
+
+    List<String> list = getStringList(m, "include", false);
+    String[] includeList = null;
+    if (list != null) {
+      includeList = list.toArray(new String[list.size()]);
+    }
+    facet.include = FacetParams.FacetRangeInclude.parseParam( includeList );
+    facet.others = EnumSet.noneOf(FacetParams.FacetRangeOther.class);
+
+    List<String> other = getStringList(m, "other", false);
+    if (other != null) {
+      for (String otherStr : other) {
+        facet.others.add( FacetParams.FacetRangeOther.get(otherStr) );
+      }
+    }
+
+    Object facetObj = m.get("facet");
+    parseSubs(facetObj);
+
+    return facet;
+  }
+
+}
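
For reference, the new FacetRangeParser maps directly onto the JSON Facet API request syntax. A hedged example request body (field names and values are hypothetical) touching each parameter the parser reads above: field, start/end/gap, hardend, mincount, include, other, and a nested "facet" block handed to parseSubs().

    // Illustrative only: prints a JSON facet body of the shape FacetRangeParser.parse() accepts.
    public class RangeFacetRequestExample {
      public static void main(String[] args) {
        String rangeFacet =
            "{ \"prices\": {"
          + "    \"type\"    : \"range\","
          + "    \"field\"   : \"price\","
          + "    \"start\"   : 0,"
          + "    \"end\"     : 100,"
          + "    \"gap\"     : 20,"
          + "    \"hardend\" : true,"
          + "    \"mincount\": 1,"
          + "    \"include\" : [\"lower\", \"edge\"],"
          + "    \"other\"   : [\"before\", \"after\"],"
          + "    \"facet\"   : { \"avg_price\": \"avg(price)\" }"
          + "} }";
        System.out.println(rangeFacet);
      }
    }

Alternatively a "ranges" list can be supplied instead of start/end/gap (the FacetRangeProcessor constructor rejects mixing the two); each entry is either the old string form, e.g. {"range": "[0,20)"}, or the newer {"from": 20, "to": 40, "inclusive_to": false} form handled by getRangeByOldFormat and getRangeByNewFormat.
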
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeProcessor.java
similarity index 84%
copy from solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
copy to solr/core/src/java/org/apache/solr/search/facet/FacetRangeProcessor.java
index b02834c..7319d28 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRangeProcessor.java
@@ -14,92 +14,37 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.solr.search.facet;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.NumericUtils;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.params.FacetParams.FacetRangeInclude;
-import org.apache.solr.common.params.FacetParams.FacetRangeOther;
+import org.apache.solr.common.params.FacetParams;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.schema.CurrencyFieldType;
-import org.apache.solr.schema.CurrencyValue;
-import org.apache.solr.schema.DateRangeField;
-import org.apache.solr.schema.ExchangeRateProvider;
-import org.apache.solr.schema.FieldType;
-import org.apache.solr.schema.SchemaField;
-import org.apache.solr.schema.TrieDateField;
-import org.apache.solr.schema.TrieField;
+import org.apache.solr.schema.*;
 import org.apache.solr.search.DocSet;
 import org.apache.solr.search.SyntaxError;
-import org.apache.solr.search.facet.SlotAcc.SlotContext;
 import org.apache.solr.util.DateMathParser;
 
-import static org.apache.solr.search.facet.FacetRequest.FacetContext.SKIP_FACET;
-
-public class FacetRange extends FacetRequestSorted {
-  static final String ACTUAL_END_JSON_KEY = "_actual_end";
-  
-  String field;
-  Object start;
-  Object end;
-  Object gap;
-  Object ranges;
-  boolean hardend = false;
-  EnumSet<FacetRangeInclude> include;
-  EnumSet<FacetRangeOther> others;
-
-  {
-    // defaults
-    mincount = 0;
-    limit = -1;
-  }
-
-  @Override
-  public FacetProcessor createFacetProcessor(FacetContext fcontext) {
-    return new FacetRangeProcessor(fcontext, this);
-  }
-
-  @Override
-  public FacetMerger createFacetMerger(Object prototype) {
-    return new FacetRangeMerger(this);
-  }
-  
-  @Override
-  public Map<String, Object> getFacetDescription() {
-    Map<String, Object> descr = new HashMap<>();
-    descr.put("field", field);
-    if (ranges != null) {
-      descr.put("ranges", ranges);
-    } else {
-      descr.put("start", start);
-      descr.put("end", end);
-      descr.put("gap", gap);
-    }
-    return descr;
-  }
-  
-}
+import java.io.IOException;
+import java.util.*;
 
+import static org.apache.solr.search.facet.FacetContext.SKIP_FACET;
 
 class FacetRangeProcessor extends FacetProcessor<FacetRange> {
   // TODO: the code paths for initial faceting, vs refinement, are very different...
   // TODO: ...it might make sense to have separate classes w/a common base?
   // TODO: let FacetRange.createFacetProcessor decide which one to instantiate?
-  
+
   final SchemaField sf;
   final Calc calc;
-  final EnumSet<FacetRangeInclude> include;
+  final EnumSet<FacetParams.FacetRangeInclude> include;
   final long effectiveMincount;
+  @SuppressWarnings({"rawtypes"})
   final Comparable start;
+  @SuppressWarnings({"rawtypes"})
   final Comparable end;
   final String gap;
   final Object ranges;
@@ -115,15 +60,16 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
    * <li>If this is a phase#1 shard request, then {@link #createRangeList} will set this value (non null)
    *     if and only if it is needed for refinement (ie: <code>hardend:false</code> &amp; <code>other</code>
    *     that requires an end value low/high value calculation).  And it will be included in the response</li>
-   * <li>If this is a phase#2 refinement request, this variable will be used 
-   *     {@link #getOrComputeActualEndForRefinement} to track the value sent with the refinement request 
-   *     -- or to cache a recomputed value if the request omitted it -- for use in refining the 
+   * <li>If this is a phase#2 refinement request, this variable will be used
+   *     {@link #getOrComputeActualEndForRefinement} to track the value sent with the refinement request
+   *     -- or to cache a recomputed value if the request omitted it -- for use in refining the
    *     <code>other</code> buckets that need them</li>
    * </ul>
    */
+  @SuppressWarnings({"rawtypes"})
   Comparable actual_end = null; // null until/unless we need it
 
-  FacetRangeProcessor(FacetRequest.FacetContext fcontext, FacetRange freq) {
+  FacetRangeProcessor(FacetContext fcontext, FacetRange freq) {
     super(fcontext, freq);
     include = freq.include;
     sf = fcontext.searcher.getSchema().getField(freq.field);
@@ -155,6 +101,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
   }
 
   @Override
+  @SuppressWarnings({"unchecked"})
   public void process() throws IOException {
     super.process();
 
@@ -167,8 +114,10 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     }
   }
 
+  @SuppressWarnings({"rawtypes"})
   private static class Range {
     Object label;
+
     Comparable low;
     Comparable high;
     boolean includeLower;
@@ -186,7 +135,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
   /**
    * Returns a {@link Calc} instance to use for <em>term</em> faceting over a numeric field.
    * This method is unused for <code>range</code> faceting, and exists solely as a helper method for other classes
-   * 
+   *
    * @param sf A field to facet on, must be of a type such that {@link FieldType#getNumberType} is non null
    * @return a <code>Calc</code> instance with {@link Calc#bitsToValue} and {@link Calc#bitsToSortableBits} methods suitable for the specified field.
    * @see FacetFieldProcessorByHashDV
@@ -256,9 +205,10 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
 
     // if we made it this far, we have no idea what it is...
     throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-                            "Unable to range facet on field:" + sf.getName());
+        "Unable to range facet on field:" + sf.getName());
   }
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   private void createRangeList() throws IOException {
 
     rangeList = new ArrayList<>();
@@ -292,10 +242,10 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
                 "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high);
       }
 
-      boolean incLower = (include.contains(FacetRangeInclude.LOWER) ||
-          (include.contains(FacetRangeInclude.EDGE) && 0 == low.compareTo(start)));
-      boolean incUpper = (include.contains(FacetRangeInclude.UPPER) ||
-          (include.contains(FacetRangeInclude.EDGE) && 0 == high.compareTo(end)));
+      boolean incLower = (include.contains(FacetParams.FacetRangeInclude.LOWER) ||
+          (include.contains(FacetParams.FacetRangeInclude.EDGE) && 0 == low.compareTo(start)));
+      boolean incUpper = (include.contains(FacetParams.FacetRangeInclude.UPPER) ||
+          (include.contains(FacetParams.FacetRangeInclude.EDGE) && 0 == high.compareTo(end)));
 
       Range range = new Range(calc.buildRangeLabel(low), low, high, incLower, incUpper);
       rangeList.add( range );
@@ -305,17 +255,17 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
 
     // no matter what other values are listed, we don't do
     // anything if "none" is specified.
-    if (! freq.others.contains(FacetRangeOther.NONE) ) {
-      final boolean all = freq.others.contains(FacetRangeOther.ALL);
+    if (! freq.others.contains(FacetParams.FacetRangeOther.NONE) ) {
+      final boolean all = freq.others.contains(FacetParams.FacetRangeOther.ALL);
 
-      if (all || freq.others.contains(FacetRangeOther.BEFORE)) {
+      if (all || freq.others.contains(FacetParams.FacetRangeOther.BEFORE)) {
         otherList.add( buildBeforeRange() );
       }
-      if (all || freq.others.contains(FacetRangeOther.AFTER)) {
+      if (all || freq.others.contains(FacetParams.FacetRangeOther.AFTER)) {
         actual_end = loop_end;
         otherList.add( buildAfterRange() );
       }
-      if (all || freq.others.contains(FacetRangeOther.BETWEEN)) {
+      if (all || freq.others.contains(FacetParams.FacetRangeOther.BETWEEN)) {
         actual_end = loop_end;
         otherList.add( buildBetweenRange() );
       }
@@ -338,6 +288,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
           "Expected List for ranges but got " + input.getClass().getSimpleName() + " = " + input
       );
     }
+    @SuppressWarnings({"rawtypes"})
     List intervals = (List) input;
     List<Range> ranges = new ArrayList<>();
     for (Object obj : intervals) {
@@ -345,7 +296,9 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
             "Expected Map for range but got " + obj.getClass().getSimpleName() + " = " + obj);
       }
+      @SuppressWarnings({"unchecked"})
       Range range;
+      @SuppressWarnings({"unchecked"})
       Map<String, Object> interval = (Map<String, Object>) obj;
       if (interval.containsKey("range")) {
         range = getRangeByOldFormat(interval);
@@ -396,6 +349,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
    * @param rangeMap Map containing the range info
    * @return {@link Range}
    */
+  @SuppressWarnings({"unchecked", "rawtypes"})
   private Range getRangeByNewFormat(Map<String, Object> rangeMap) {
     Object fromObj = rangeMap.get("from");
     Object toObj = rangeMap.get("to");
@@ -415,6 +369,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
 
     Comparable from = getComparableFromString(fromStr);
     Comparable to = getComparableFromString(toStr);
+
     if (from != null && to != null && from.compareTo(to) > 0) {
       // allowing from and to be same
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'from' is higher than 'to' in range for key: " + key);
@@ -447,6 +402,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
    * @param rangeStr The string containing the Range
    * @return {@link Range}
    */
+  @SuppressWarnings({"rawtypes", "unchecked"})
   private Range parseRangeFromString(String key, String rangeStr) throws SyntaxError {
     rangeStr = rangeStr.trim();
     if (rangeStr.isEmpty()) {
@@ -514,6 +470,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     return n;
   }
 
+  @SuppressWarnings({"rawtypes"})
   private Comparable getComparableFromString(String value) {
     value = value.trim();
     if ("*".equals(value)) {
@@ -522,6 +479,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     return calc.getValue(value);
   }
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   private  SimpleOrderedMap getRangeCountsIndexed() throws IOException {
 
     int slotCount = rangeList.size() + otherList.size();
@@ -578,10 +536,11 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     DocSet intersection = fcontext.searcher.getDocSet(rangeQ, fcontext.base);
     filters[slot] = rangeQ;
     intersections[slot] = intersection;  // save for later  // TODO: only save if number of slots is small enough?
-    long num = collect(intersection, slot, slotNum -> { return new SlotContext(rangeQ); });
+    long num = collect(intersection, slot, slotNum -> { return new SlotAcc.SlotContext(rangeQ); });
     countAcc.incrementCount(slot, num); // TODO: roll this into collect()
   }
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   private void doSubs(SimpleOrderedMap bucket, int slot) throws IOException {
     // handle sub-facets for this bucket
     if (freq.getSubFacets().size() > 0) {
@@ -613,6 +572,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     /**
      * Used by {@link FacetFieldProcessorByHashDV} for field faceting on numeric types -- not used for <code>range</code> faceting
      */
+    @SuppressWarnings({"rawtypes"})
     public Comparable bitsToValue(long bits) {
       return bits;
     }
@@ -628,15 +588,15 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
      * Given the low value for a bucket, generates the appropriate "label" object to use.
      * By default return the low object unmodified.
      */
-    public Object buildRangeLabel(Comparable low) {
+    public Object buildRangeLabel(@SuppressWarnings("rawtypes") Comparable low) {
       return low;
     }
-    
+
     /**
      * Formats a value into a label used in a response
      * Default Impl just uses toString()
      */
-    public String formatValue(final Comparable val) {
+    public String formatValue(@SuppressWarnings("rawtypes") final Comparable val) {
       return val.toString();
     }
 
@@ -644,6 +604,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
      * Parses a String param into a value throwing
      * an exception if not possible
      */
+    @SuppressWarnings({"rawtypes"})
     public final Comparable getValue(final String rawval) {
       try {
         return parseStr(rawval);
@@ -658,6 +619,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
      * Parses a String param into a value.
      * Can throw a low level format exception as needed.
      */
+    @SuppressWarnings({"rawtypes"})
     protected abstract Comparable parseStr(final String rawval)
         throws java.text.ParseException;
 
@@ -695,6 +657,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
      * the corresponding high Range endpoint value, throwing
      * a useful exception if not possible.
      */
+    @SuppressWarnings({"rawtypes"})
     public final Comparable addGap(Comparable value, String gap) {
       try {
         return parseAndAddGap(value, gap);
@@ -709,6 +672,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
      * the corresponding high Range endpoint value.
      * Can throw a low level format exception as needed.
      */
+    @SuppressWarnings({"rawtypes"})
     protected abstract Comparable parseAndAddGap(Comparable value, String gap)
         throws java.text.ParseException;
 
@@ -716,6 +680,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
 
   private static class FloatCalc extends Calc {
 
+    @SuppressWarnings("rawtypes")
     @Override
     public Comparable bitsToValue(long bits) {
       if (field.getType().isPointField() && field.multiValued()) {
@@ -736,13 +701,14 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
       return Float.valueOf(rawval);
     }
     @Override
-    public Float parseAndAddGap(Comparable value, String gap) {
+    public Float parseAndAddGap(@SuppressWarnings("rawtypes") Comparable value, String gap) {
       return ((Number) value).floatValue() + Float.parseFloat(gap);
     }
   }
 
   private static class DoubleCalc extends Calc {
     @Override
+    @SuppressWarnings({"rawtypes"})
     public Comparable bitsToValue(long bits) {
       if (field.getType().isPointField() && field.multiValued()) {
         return NumericUtils.sortableLongToDouble(bits);
@@ -762,7 +728,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
       return Double.valueOf(rawval);
     }
     @Override
-    public Double parseAndAddGap(Comparable value, String gap) {
+    public Double parseAndAddGap(@SuppressWarnings("rawtypes") Comparable value, String gap) {
       return ((Number) value).doubleValue() + Double.parseDouble(gap);
     }
   }
@@ -771,6 +737,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
 
     public IntCalc(final SchemaField f) { super(f); }
     @Override
+    @SuppressWarnings({"rawtypes"})
     public Comparable bitsToValue(long bits) {
       return (int)bits;
     }
@@ -779,7 +746,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
       return Integer.valueOf(rawval);
     }
     @Override
-    public Integer parseAndAddGap(Comparable value, String gap) {
+    public Integer parseAndAddGap(@SuppressWarnings("rawtypes") Comparable value, String gap) {
       return ((Number) value).intValue() + Integer.parseInt(gap);
     }
   }
@@ -792,7 +759,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
       return Long.valueOf(rawval);
     }
     @Override
-    public Long parseAndAddGap(Comparable value, String gap) {
+    public Long parseAndAddGap(@SuppressWarnings("rawtypes") Comparable value, String gap) {
       return ((Number) value).longValue() + Long.parseLong(gap);
     }
   }
@@ -810,12 +777,13 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     }
 
     @Override
+    @SuppressWarnings({"rawtypes"})
     public Comparable bitsToValue(long bits) {
       return new Date(bits);
     }
 
     @Override
-    public String formatValue(Comparable val) {
+    public String formatValue(@SuppressWarnings("rawtypes") Comparable val) {
       return ((Date)val).toInstant().toString();
     }
     @Override
@@ -827,7 +795,7 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
       return rawval;
     }
     @Override
-    public Date parseAndAddGap(Comparable value, String gap) throws java.text.ParseException {
+    public Date parseAndAddGap(@SuppressWarnings("rawtypes") Comparable value, String gap) throws java.text.ParseException {
       final DateMathParser dmp = new DateMathParser();
       dmp.setNow((Date)value);
       return dmp.parseMath(gap);
@@ -841,32 +809,33 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
       super(field);
       if(!(this.field.getType() instanceof CurrencyFieldType)) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-                                "Cannot perform range faceting over non CurrencyField fields");
+            "Cannot perform range faceting over non CurrencyField fields");
       }
       defaultCurrencyCode =
-        ((CurrencyFieldType)this.field.getType()).getDefaultCurrency();
+          ((CurrencyFieldType)this.field.getType()).getDefaultCurrency();
       exchangeRateProvider =
-        ((CurrencyFieldType)this.field.getType()).getProvider();
+          ((CurrencyFieldType)this.field.getType()).getProvider();
     }
 
-    /** 
-     * Throws a Server Error that this type of operation is not supported for this field 
-     * {@inheritDoc} 
+    /**
+     * Throws a Server Error that this type of operation is not supported for this field
+     * {@inheritDoc}
      */
     @Override
+    @SuppressWarnings({"rawtypes"})
     public Comparable bitsToValue(long bits) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                              "Currency Field " + field.getName() + " can not be used in this way");
+          "Currency Field " + field.getName() + " can not be used in this way");
     }
 
-    /** 
-     * Throws a Server Error that this type of operation is not supported for this field 
-     * {@inheritDoc} 
+    /**
+     * Throws a Server Error that this type of operation is not supported for this field
+     * {@inheritDoc}
      */
     @Override
     public long bitsToSortableBits(long bits) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                              "Currency Field " + field.getName() + " can not be used in this way");
+          "Currency Field " + field.getName() + " can not be used in this way");
     }
 
     /**
@@ -874,16 +843,17 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
      * @see CurrencyValue#strValue
      */
     @Override
-    public Object buildRangeLabel(Comparable low) {
+    public Object buildRangeLabel(@SuppressWarnings("rawtypes") Comparable low) {
       return ((CurrencyValue)low).strValue();
     }
-    
+
     @Override
-    public String formatValue(Comparable val) {
+    public String formatValue(@SuppressWarnings("rawtypes") Comparable val) {
       return ((CurrencyValue)val).strValue();
     }
 
     @Override
+    @SuppressWarnings({"rawtypes"})
     protected Comparable parseStr(final String rawval) throws java.text.ParseException {
       return CurrencyValue.parse(rawval, defaultCurrencyCode);
     }
@@ -894,35 +864,37 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     }
 
     @Override
+    @SuppressWarnings({"rawtypes"})
     protected Comparable parseAndAddGap(Comparable value, String gap) throws java.text.ParseException{
       if (value == null) {
         throw new NullPointerException("Cannot perform range faceting on null CurrencyValue");
       }
       CurrencyValue val = (CurrencyValue) value;
       CurrencyValue gapCurrencyValue =
-        CurrencyValue.parse(gap, defaultCurrencyCode);
+          CurrencyValue.parse(gap, defaultCurrencyCode);
       long gapAmount =
-        CurrencyValue.convertAmount(this.exchangeRateProvider,
-                                    gapCurrencyValue.getCurrencyCode(),
-                                    gapCurrencyValue.getAmount(),
-                                    val.getCurrencyCode());
+          CurrencyValue.convertAmount(this.exchangeRateProvider,
+              gapCurrencyValue.getCurrencyCode(),
+              gapCurrencyValue.getAmount(),
+              val.getCurrencyCode());
       return new CurrencyValue(val.getAmount() + gapAmount,
-                               val.getCurrencyCode());
+          val.getCurrencyCode());
 
     }
 
   }
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   protected SimpleOrderedMap<Object> refineFacets() throws IOException {
     // this refineFacets method is patterned after FacetFieldProcessor.refineFacets such that
     // the same "_s" skip bucket syntax is used and FacetRangeMerger can subclass FacetRequestSortedMerger
     // for dealing with them & the refinement requests.
-    // 
+    //
     // But range faceting does *NOT* use the "leaves" and "partial" syntax
-    // 
+    //
     // If/When range facet becomes more like field facet in its ability to sort and limit the "range buckets"
     // FacetRangeProcessor and FacetFieldProcessor should probably be refactored to share more code.
-    
+
     boolean skipThisFacet = (fcontext.flags & SKIP_FACET) != 0;
 
     List<List> skip = FacetFieldProcessor.asList(fcontext.facetInfo.get("_s"));    // We have seen this bucket, so skip stats on it, and skip sub-facets except for the specified sub-facets that should calculate specified buckets.
@@ -947,41 +919,42 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     }
 
     { // refine the special "other" buckets
-      
+
       // NOTE: we're re-using this variable for each special we look for...
       Map<String,Object> specialFacetInfo;
 
-      specialFacetInfo = (Map<String, Object>) fcontext.facetInfo.get(FacetRangeOther.BEFORE.toString());
+      specialFacetInfo = (Map<String, Object>) fcontext.facetInfo.get(FacetParams.FacetRangeOther.BEFORE.toString());
       if (null != specialFacetInfo) {
-        res.add(FacetRangeOther.BEFORE.toString(),
-                refineRange(buildBeforeRange(), skipThisFacet, specialFacetInfo));
+        res.add(FacetParams.FacetRangeOther.BEFORE.toString(),
+            refineRange(buildBeforeRange(), skipThisFacet, specialFacetInfo));
       }
-      
-      specialFacetInfo = (Map<String, Object>) fcontext.facetInfo.get(FacetRangeOther.AFTER.toString());
+
+      specialFacetInfo = (Map<String, Object>) fcontext.facetInfo.get(FacetParams.FacetRangeOther.AFTER.toString());
       if (null != specialFacetInfo) {
-        res.add(FacetRangeOther.AFTER.toString(),
-                refineRange(buildAfterRange(), skipThisFacet, specialFacetInfo));
+        res.add(FacetParams.FacetRangeOther.AFTER.toString(),
+            refineRange(buildAfterRange(), skipThisFacet, specialFacetInfo));
       }
-      
-      specialFacetInfo = (Map<String, Object>) fcontext.facetInfo.get(FacetRangeOther.BETWEEN.toString());
+
+      specialFacetInfo = (Map<String, Object>) fcontext.facetInfo.get(FacetParams.FacetRangeOther.BETWEEN.toString());
       if (null != specialFacetInfo) {
-        res.add(FacetRangeOther.BETWEEN.toString(),
-                refineRange(buildBetweenRange(), skipThisFacet, specialFacetInfo));
+        res.add(FacetParams.FacetRangeOther.BETWEEN.toString(),
+            refineRange(buildBetweenRange(), skipThisFacet, specialFacetInfo));
       }
     }
-      
+
     return res;
   }
 
-  /** 
-   * Returns the "Actual End" value sent from the merge as part of the refinement request (if any) 
+  /**
+   * Returns the "Actual End" value sent from the merge as part of the refinement request (if any)
    * or re-computes it as needed using the Calc and caches the result for re-use
    */
+  @SuppressWarnings({"rawtypes", "unchecked"})
   private Comparable getOrComputeActualEndForRefinement() {
     if (null != actual_end) {
       return actual_end;
     }
-    
+
     if (freq.hardend) {
       actual_end = this.end;
     } else if (fcontext.facetInfo.containsKey(FacetRange.ACTUAL_END_JSON_KEY)) {
@@ -997,17 +970,18 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
         }
         if (high.compareTo(low) <= 0) {
           throw new SolrException
-            (SolrException.ErrorCode.BAD_REQUEST,
-             "Garbage input for facet refinement w/o " + FacetRange.ACTUAL_END_JSON_KEY);
+              (SolrException.ErrorCode.BAD_REQUEST,
+                  "Garbage input for facet refinement w/o " + FacetRange.ACTUAL_END_JSON_KEY);
         }
         low = high;
       }
     }
-    
+
     assert null != actual_end;
     return actual_end;
   }
-  
+
+  @SuppressWarnings({"unchecked", "rawtypes"})
   private SimpleOrderedMap<Object> refineBucket(Object bucketVal, boolean skip, Map<String,Object> facetInfo) throws IOException {
 
     String val = bucketVal.toString();
@@ -1044,10 +1018,10 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
               "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high );
     }
 
-    boolean incLower = (include.contains(FacetRangeInclude.LOWER) ||
-                        (include.contains(FacetRangeInclude.EDGE) && 0 == low.compareTo(start)));
-    boolean incUpper = (include.contains(FacetRangeInclude.UPPER) ||
-                        (include.contains(FacetRangeInclude.EDGE) && 0 == high.compareTo(max_end)));
+    boolean incLower = (include.contains(FacetParams.FacetRangeInclude.LOWER) ||
+        (include.contains(FacetParams.FacetRangeInclude.EDGE) && 0 == low.compareTo(start)));
+    boolean incUpper = (include.contains(FacetParams.FacetRangeInclude.UPPER) ||
+        (include.contains(FacetParams.FacetRangeInclude.EDGE) && 0 == high.compareTo(max_end)));
 
     Range range = new Range(calc.buildRangeLabel(low), low, high, incLower, incUpper);
 
@@ -1068,34 +1042,36 @@ class FacetRangeProcessor extends FacetProcessor<FacetRange> {
     fillBucket(bucket, domainQ, null, skip, facetInfo);
     return bucket;
   }
-  
+
   /** Helper method for building a "before" Range */
   private Range buildBeforeRange() {
     // include upper bound if "outer" or if first gap doesn't already include it
-    final boolean incUpper = (include.contains(FacetRangeInclude.OUTER) ||
-                              (!(include.contains(FacetRangeInclude.LOWER) ||
-                                 include.contains(FacetRangeInclude.EDGE))));
-    return new Range(FacetRangeOther.BEFORE.toString(), null, start, false, incUpper);
+    final boolean incUpper = (include.contains(FacetParams.FacetRangeInclude.OUTER) ||
+        (!(include.contains(FacetParams.FacetRangeInclude.LOWER) ||
+            include.contains(FacetParams.FacetRangeInclude.EDGE))));
+    return new Range(FacetParams.FacetRangeOther.BEFORE.toString(), null, start, false, incUpper);
   }
 
   /** Helper method for building a "after" Range */
   private Range buildAfterRange() {
+    @SuppressWarnings({"rawtypes"})
     final Comparable the_end = getOrComputeActualEndForRefinement();
     assert null != the_end;
-    final boolean incLower = (include.contains(FacetRangeInclude.OUTER) ||
-                              (!(include.contains(FacetRangeInclude.UPPER) ||
-                                 include.contains(FacetRangeInclude.EDGE))));
-    return new Range(FacetRangeOther.AFTER.toString(), the_end, null, incLower, false);
+    final boolean incLower = (include.contains(FacetParams.FacetRangeInclude.OUTER) ||
+        (!(include.contains(FacetParams.FacetRangeInclude.UPPER) ||
+            include.contains(FacetParams.FacetRangeInclude.EDGE))));
+    return new Range(FacetParams.FacetRangeOther.AFTER.toString(), the_end, null, incLower, false);
   }
 
   /** Helper method for building a "between" Range */
   private Range buildBetweenRange() {
+    @SuppressWarnings({"rawtypes"})
     final Comparable the_end = getOrComputeActualEndForRefinement();
     assert null != the_end;
-    final boolean incLower = (include.contains(FacetRangeInclude.LOWER) ||
-                              include.contains(FacetRangeInclude.EDGE));
-    final boolean incUpper = (include.contains(FacetRangeInclude.UPPER) ||
-                              include.contains(FacetRangeInclude.EDGE));
-    return new Range(FacetRangeOther.BETWEEN.toString(), start, the_end, incLower, incUpper);
+    final boolean incLower = (include.contains(FacetParams.FacetRangeInclude.LOWER) ||
+        include.contains(FacetParams.FacetRangeInclude.EDGE));
+    final boolean incUpper = (include.contains(FacetParams.FacetRangeInclude.UPPER) ||
+        include.contains(FacetParams.FacetRangeInclude.EDGE));
+    return new Range(FacetParams.FacetRangeOther.BETWEEN.toString(), start, the_end, incLower, incUpper);
   }
 }
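
For orientation (an editorial sketch, not part of the patch): the Calc hierarchy touched above follows a
two-method contract -- parseStr converts a raw request string into a Comparable bucket value, and
parseAndAddGap computes a bucket's upper endpoint from its lower endpoint plus a gap string. A minimal
stand-alone sketch of that pattern, using hypothetical class names that do not exist in Solr:

    // Illustrative sketch of the parseStr/parseAndAddGap contract; not Solr code.
    abstract class GapCalcSketch<T extends Comparable<T>> {
      // e.g. "100" -> 100
      abstract T parseStr(String raw) throws java.text.ParseException;
      // e.g. low = 0, gap = "20" -> 20 (the bucket's upper endpoint)
      abstract T parseAndAddGap(T low, String gap) throws java.text.ParseException;
    }

    class IntGapCalcSketch extends GapCalcSketch<Integer> {
      @Override Integer parseStr(String raw) { return Integer.valueOf(raw); }
      @Override Integer parseAndAddGap(Integer low, String gap) {
        // mirrors IntCalc above: ((Number) value).intValue() + Integer.parseInt(gap)
        return low + Integer.parseInt(gap);
      }
    }

The Date and Currency variants in the patch plug DateMathParser and CurrencyValue into the same two methods.
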
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
index 2ff3b57..42f8488 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
@@ -17,8 +17,6 @@
 package org.apache.solr.search.facet;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.EnumSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -27,19 +25,14 @@ import java.util.Optional;
 
 import org.apache.lucene.search.Query;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.params.FacetParams;
 import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.search.DocSet;
-import org.apache.solr.search.FunctionQParser;
 import org.apache.solr.search.JoinQParserPlugin;
 import org.apache.solr.search.QParser;
 import org.apache.solr.search.QueryContext;
 import org.apache.solr.search.SolrConstantScoreQuery;
-import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.search.SyntaxError;
 import org.apache.solr.search.join.GraphQuery;
 import org.apache.solr.search.join.GraphQueryParser;
@@ -203,6 +196,7 @@ public abstract class FacetRequest {
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                 "'join' domain change requires a map containing the 'from' and 'to' fields");
           }
+          @SuppressWarnings({"unchecked"})
           final Map<String,String> join = (Map<String,String>) queryJoin;
           if (! (join.containsKey("from") && join.containsKey("to") &&
               null != join.get("from") && null != join.get("to")) ) {
@@ -264,6 +258,7 @@ public abstract class FacetRequest {
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                                     "'graph' domain change requires a map containing the 'from' and 'to' fields");
           }
+          @SuppressWarnings({"unchecked"})
           final Map<String,String> graph = (Map<String,String>) queryGraph;
           if (! (graph.containsKey("from") && graph.containsKey("to") &&
                  null != graph.get("from") && null != graph.get("to")) ) {
@@ -306,6 +301,7 @@ public abstract class FacetRequest {
    * @param params a typed parameter structure (unlike SolrParams which are all string values).
    */
   public static FacetRequest parse(SolrQueryRequest req, Map<String, Object> params) {
+    @SuppressWarnings({"rawtypes"})
     FacetParser parser = new FacetTopParser(req);
     try {
       return parser.parse(params);
@@ -324,6 +320,7 @@ public abstract class FacetRequest {
    * @param params a typed parameter structure (unlike SolrParams which are all string values).
    */
   public static FacetRequest parseOneFacetReq(SolrQueryRequest req, Map<String, Object> params) {
+    @SuppressWarnings("rawtypes")
     FacetParser parser = new FacetTopParser(req);
     try {
       return (FacetRequest) parser.parseFacetOrStat("", params);
@@ -409,6 +406,7 @@ public abstract class FacetRequest {
 
   /** Process the request with the facet context settings, a parameter-object. */
   final Object process(FacetContext fcontext) throws IOException {
+    @SuppressWarnings("rawtypes")
     FacetProcessor facetProcessor = createFacetProcessor(fcontext);
 
     FacetDebugInfo debugInfo = fcontext.getDebugInfo();
@@ -432,437 +430,13 @@ public abstract class FacetRequest {
     return facetProcessor.getResponse(); 
   }
 
+  @SuppressWarnings("rawtypes")
   public abstract FacetProcessor createFacetProcessor(FacetContext fcontext);
 
   public abstract FacetMerger createFacetMerger(Object prototype);
   
   public abstract Map<String, Object> getFacetDescription();
 
-  static class FacetContext {
-    // Context info for actually executing a local facet command
-    public static final int IS_SHARD=0x01;
-    public static final int IS_REFINEMENT=0x02;
-    public static final int SKIP_FACET=0x04;  // refinement: skip calculating this immediate facet, but proceed to specific sub-facets based on facetInfo
-
-    FacetProcessor processor;
-    Map<String,Object> facetInfo; // refinement info for this node
-    QueryContext qcontext;
-    SolrQueryRequest req;  // TODO: replace with params?
-    SolrIndexSearcher searcher;
-    Query filter;  // TODO: keep track of as a DocSet or as a Query?
-    DocSet base;
-    FacetContext parent;
-    int flags;
-    FacetDebugInfo debugInfo;
-
-    public void setDebugInfo(FacetDebugInfo debugInfo) {
-      this.debugInfo = debugInfo;
-    }
-
-    public FacetDebugInfo getDebugInfo() {
-      return debugInfo;
-    }
-
-    public boolean isShard() {
-      return (flags & IS_SHARD) != 0;
-    }
-
-    /**
-     * @param filter The filter for the bucket that resulted in this context/domain.  Can be null if this is the root context.
-     * @param domain The resulting set of documents for this facet.
-     */
-    public FacetContext sub(Query filter, DocSet domain) {
-      FacetContext ctx = new FacetContext();
-      ctx.parent = this;
-      ctx.base = domain;
-      ctx.filter = filter;
-
-      // carry over from parent
-      ctx.flags = flags;
-      ctx.qcontext = qcontext;
-      ctx.req = req;
-      ctx.searcher = searcher;
-
-      return ctx;
-    }
-  }
-
-  abstract static class FacetParser<FacetRequestT extends FacetRequest> {
-    protected FacetRequestT facet;
-    protected FacetParser parent;
-    protected String key;
-
-    public FacetParser(FacetParser parent, String key) {
-      this.parent = parent;
-      this.key = key;
-    }
-
-    public String getKey() {
-      return key;
-    }
-
-    public String getPathStr() {
-      if (parent == null) {
-        return "/" + key;
-      }
-      return parent.getKey() + "/" + key;
-    }
-
-    protected RuntimeException err(String msg) {
-      return new SolrException(SolrException.ErrorCode.BAD_REQUEST, msg + " , path="+getPathStr());
-    }
-
-    public abstract FacetRequest parse(Object o) throws SyntaxError;
-
-    // TODO: put the FacetRequest on the parser object?
-    public void parseSubs(Object o) throws SyntaxError {
-      if (o==null) return;
-      if (o instanceof Map) {
-        Map<String,Object> m = (Map<String, Object>) o;
-        for (Map.Entry<String,Object> entry : m.entrySet()) {
-          String key = entry.getKey();
-          Object value = entry.getValue();
-
-          if ("processEmpty".equals(key)) {
-            facet.processEmpty = getBoolean(m, "processEmpty", false);
-            continue;
-          }
-
-          // "my_prices" : { "range" : { "field":...
-          // key="my_prices", value={"range":..
-
-          Object parsedValue = parseFacetOrStat(key, value);
-
-          // TODO: have parseFacetOrStat directly add instead of return?
-          if (parsedValue instanceof FacetRequest) {
-            facet.addSubFacet(key, (FacetRequest)parsedValue);
-          } else if (parsedValue instanceof AggValueSource) {
-            facet.addStat(key, (AggValueSource)parsedValue);
-          } else {
-            throw err("Unknown facet type key=" + key + " class=" + (parsedValue == null ? "null" : parsedValue.getClass().getName()));
-          }
-        }
-      } else {
-        // facet : my_field?
-        throw err("Expected map for facet/stat");
-      }
-    }
-
-    public Object parseFacetOrStat(String key, Object o) throws SyntaxError {
-
-      if (o instanceof String) {
-        return parseStringFacetOrStat(key, (String)o);
-      }
-
-      if (!(o instanceof Map)) {
-        throw err("expected Map but got " + o);
-      }
-
-      // The type can be in a one element map, or inside the args as the "type" field
-      // { "query" : "foo:bar" }
-      // { "range" : { "field":... } }
-      // { "type"  : range, field : myfield, ... }
-      Map<String,Object> m = (Map<String,Object>)o;
-      String type;
-      Object args;
-
-      if (m.size() == 1) {
-        Map.Entry<String,Object> entry = m.entrySet().iterator().next();
-        type = entry.getKey();
-        args = entry.getValue();
-        // throw err("expected facet/stat type name, like {range:{... but got " + m);
-      } else {
-        // type should be inside the map as a parameter
-        Object typeObj = m.get("type");
-        if (!(typeObj instanceof String)) {
-            throw err("expected facet/stat type name, like {type:range, field:price, ...} but got " + typeObj);
-        }
-        type = (String)typeObj;
-        args = m;
-      }
-
-      return parseFacetOrStat(key, type, args);
-    }
-
-    public Object parseFacetOrStat(String key, String type, Object args) throws SyntaxError {
-      // TODO: a place to register all these facet types?
-
-      switch (type) {
-        case "field":
-        case "terms":
-          return new FacetFieldParser(this, key).parse(args);
-        case "query":
-          return new FacetQueryParser(this, key).parse(args);
-        case "range":
-          return new FacetRangeParser(this, key).parse(args);
-        case "heatmap":
-          return new FacetHeatmap.Parser(this, key).parse(args);
-        case "func":
-          return parseStat(key, args);
-      }
-
-      throw err("Unknown facet or stat. key=" + key + " type=" + type + " args=" + args);
-    }
-
-    public Object parseStringFacetOrStat(String key, String s) throws SyntaxError {
-      // "avg(myfield)"
-      return parseStat(key, s);
-      // TODO - simple string representation of facets
-    }
-
-    /** Parses simple strings like "avg(x)" in the context of optional local params (may be null) */
-    private AggValueSource parseStatWithParams(String key, SolrParams localparams, String stat) throws SyntaxError {
-      SolrQueryRequest req = getSolrRequest();
-      FunctionQParser parser = new FunctionQParser(stat, localparams, req.getParams(), req);
-      AggValueSource agg = parser.parseAgg(FunctionQParser.FLAG_DEFAULT);
-      return agg;
-    }
-
-    /** Parses simple strings like "avg(x)" or robust Maps that may contain local params */
-    private AggValueSource parseStat(String key, Object args) throws SyntaxError {
-      assert null != args;
-
-      if (args instanceof CharSequence) {
-        // Both of these variants are already unpacked for us in this case, and use no local params...
-        // 1) x:{func:'min(foo)'}
-        // 2) x:'min(foo)'
-        return parseStatWithParams(key, null, args.toString());
-      }
-
-      if (args instanceof Map) {
-        final Map<String,Object> statMap = (Map<String,Object>)args;
-        return parseStatWithParams(key, jsonToSolrParams(statMap), statMap.get("func").toString());
-      }
-
-      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-                              "Stats must be specified as either a simple string, or a json Map");
-
-    }
-
-
-    private Domain getDomain() {
-      if (facet.domain == null) {
-        facet.domain = new Domain();
-      }
-      return facet.domain;
-    }
-
-    protected void parseCommonParams(Object o) {
-      if (o instanceof Map) {
-        Map<String,Object> m = (Map<String,Object>)o;
-        List<String> excludeTags = getStringList(m, "excludeTags");
-        if (excludeTags != null) {
-          getDomain().excludeTags = excludeTags;
-        }
-
-        Object domainObj =  m.get("domain");
-        if (domainObj instanceof Map) {
-          Map<String, Object> domainMap = (Map<String, Object>)domainObj;
-          Domain domain = getDomain();
-
-          excludeTags = getStringList(domainMap, "excludeTags");
-          if (excludeTags != null) {
-            domain.excludeTags = excludeTags;
-          }
-
-          if (domainMap.containsKey("query")) {
-            domain.explicitQueries = parseJSONQueryStruct(domainMap.get("query"));
-            if (null == domain.explicitQueries) {
-              throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-                                      "'query' domain can not be null or empty");
-            } else if (null != domain.excludeTags) {
-              throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-                                      "'query' domain can not be combined with 'excludeTags'");
-            }
-          }
-
-          String blockParent = getString(domainMap, "blockParent", null);
-          String blockChildren = getString(domainMap, "blockChildren", null);
-
-          if (blockParent != null) {
-            domain.toParent = true;
-            domain.parents = blockParent;
-          } else if (blockChildren != null) {
-            domain.toChildren = true;
-            domain.parents = blockChildren;
-          }
-
-          Domain.JoinField.createJoinField(domain, domainMap);
-          Domain.GraphField.createGraphField(domain, domainMap);
-
-          Object filterOrList = domainMap.get("filter");
-          if (filterOrList != null) {
-            assert domain.filters == null;
-            domain.filters = parseJSONQueryStruct(filterOrList);
-          }
-
-        } else if (domainObj != null) {
-          throw err("Expected Map for 'domain', received " + domainObj.getClass().getSimpleName() + "=" + domainObj);
-        }
-      }
-    }
-
-    /** returns null on null input, otherwise returns a list of the JSON query structures -- either
-     * directly from the raw (list) input, or if raw input is a not a list then it encapsulates
-     * it in a new list.
-     */
-    private List<Object> parseJSONQueryStruct(Object raw) {
-      List<Object> result = null;
-      if (null == raw) {
-        return result;
-      } else if (raw instanceof List) {
-        result = (List<Object>) raw;
-      } else {
-        result = new ArrayList<>(1);
-        result.add(raw);
-      }
-      return result;
-    }
-
-    public String getField(Map<String,Object> args) {
-      Object fieldName = args.get("field"); // TODO: pull out into defined constant
-      if (fieldName == null) {
-        fieldName = args.get("f");  // short form
-      }
-      if (fieldName == null) {
-        throw err("Missing 'field'");
-      }
-
-      if (!(fieldName instanceof String)) {
-        throw err("Expected string for 'field', got" + fieldName);
-      }
-
-      return (String)fieldName;
-    }
-
-
-    public Long getLongOrNull(Map<String,Object> args, String paramName, boolean required) {
-      Object o = args.get(paramName);
-      if (o == null) {
-        if (required) {
-          throw err("Missing required parameter '" + paramName + "'");
-        }
-        return null;
-      }
-      if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) {
-        throw err("Expected integer type for param '"+paramName + "' but got " + o);
-      }
-
-      return ((Number)o).longValue();
-    }
-
-    public long getLong(Map<String,Object> args, String paramName, long defVal) {
-      Object o = args.get(paramName);
-      if (o == null) {
-        return defVal;
-      }
-      if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) {
-        throw err("Expected integer type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
-      }
-
-      return ((Number)o).longValue();
-    }
-
-    public Double getDoubleOrNull(Map<String,Object> args, String paramName, boolean required) {
-      Object o = args.get(paramName);
-      if (o == null) {
-        if (required) {
-          throw err("Missing required parameter '" + paramName + "'");
-        }
-        return null;
-      }
-      if (!(o instanceof Number)) {
-        throw err("Expected double type for param '" + paramName + "' but got " + o);
-      }
-
-      return ((Number)o).doubleValue();
-    }
-
-    public boolean getBoolean(Map<String,Object> args, String paramName, boolean defVal) {
-      Object o = args.get(paramName);
-      if (o == null) {
-        return defVal;
-      }
-      // TODO: should we be more flexible and accept things like "true" (strings)?
-      // Perhaps wait until the use case comes up.
-      if (!(o instanceof Boolean)) {
-        throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
-      }
-
-      return (Boolean)o;
-    }
-
-    public Boolean getBooleanOrNull(Map<String, Object> args, String paramName) {
-      Object o = args.get(paramName);
-
-      if (o != null && !(o instanceof Boolean)) {
-        throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
-      }
-      return (Boolean) o;
-    }
-
-
-    public String getString(Map<String,Object> args, String paramName, String defVal) {
-      Object o = args.get(paramName);
-      if (o == null) {
-        return defVal;
-      }
-      if (!(o instanceof String)) {
-        throw err("Expected string type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
-      }
-
-      return (String)o;
-    }
-
-    public Object getVal(Map<String, Object> args, String paramName, boolean required) {
-      Object o = args.get(paramName);
-      if (o == null && required) {
-        throw err("Missing required parameter: '" + paramName + "'");
-      }
-      return o;
-    }
-
-    public List<String> getStringList(Map<String,Object> args, String paramName) {
-      return getStringList(args, paramName, true);
-    }
-
-    public List<String> getStringList(Map<String, Object> args, String paramName, boolean decode) {
-      Object o = args.get(paramName);
-      if (o == null) {
-        return null;
-      }
-      if (o instanceof List) {
-        return (List<String>)o;
-      }
-      if (o instanceof String) {
-        // TODO: SOLR-12539 handle spaces in b/w comma & value ie, should the values be trimmed before returning??
-        return StrUtils.splitSmart((String)o, ",", decode);
-      }
-
-      throw err("Expected list of string or comma separated string values for '" + paramName +
-          "', received " + o.getClass().getSimpleName() + "=" + o);
-    }
-
-    public IndexSchema getSchema() {
-      return parent.getSchema();
-    }
-
-    public SolrQueryRequest getSolrRequest() {
-      return parent.getSolrRequest();
-    }
-
-    /**
-     * Helper that handles the possibility of map values being lists
-     * NOTE: does *NOT* fail on map values that are sub-maps (ie: nested json objects)
-     */
-    public static SolrParams jsonToSolrParams(Map jsonObject) {
-      // HACK, but NamedList already handles the list processing for us...
-      NamedList<String> nl = new NamedList<>();
-      nl.addAll(jsonObject);
-      return SolrParams.toSolrParams(nl);
-    }
-  }
-
   static class FacetTopParser extends FacetParser<FacetQuery> {
     private SolrQueryRequest req;
 
@@ -890,7 +464,7 @@ public abstract class FacetRequest {
   }
 
   static class FacetQueryParser extends FacetParser<FacetQuery> {
-    public FacetQueryParser(FacetParser parent, String key) {
+    public FacetQueryParser(@SuppressWarnings("rawtypes") FacetParser parent, String key) {
       super(parent, key);
       facet = new FacetQuery();
     }
@@ -905,6 +479,7 @@ public abstract class FacetRequest {
         qstring = (String)arg;
 
       } else if (arg instanceof Map) {
+        @SuppressWarnings({"unchecked"})
         Map<String, Object> m = (Map<String, Object>) arg;
         qstring = getString(m, "q", null);
         if (qstring == null) {
@@ -959,6 +534,7 @@ static class FacetBlockParentParser extends FacetParser<FacetBlockParent> {
  ***/
 
   static class FacetFieldParser extends FacetParser<FacetField> {
+    @SuppressWarnings({"rawtypes"})
     public FacetFieldParser(FacetParser parent, String key) {
       super(parent, key);
       facet = new FacetField();
@@ -971,6 +547,7 @@ static class FacetBlockParentParser extends FacetParser<FacetBlockParent> {
         facet.field = (String)arg;
 
       } else if (arg instanceof Map) {
+        @SuppressWarnings({"unchecked"})
         Map<String, Object> m = (Map<String, Object>) arg;
         facet.field = getField(m);
         facet.offset = getLong(m, "offset", facet.offset);
@@ -1049,6 +626,7 @@ static class FacetBlockParentParser extends FacetParser<FacetBlockParent> {
         }
       } else if (sort instanceof Map) {
         // { myvar : 'desc' }
+        @SuppressWarnings("unchecked")
         Optional<Map.Entry<String,Object>> optional = ((Map<String,Object>)sort).entrySet().stream().findFirst();
         if (optional.isPresent()) {
           Map.Entry<String, Object> entry = optional.get();
@@ -1075,55 +653,6 @@ static class FacetBlockParentParser extends FacetParser<FacetBlockParent> {
 
   }
 
-  static class FacetRangeParser extends FacetParser<FacetRange> {
-    public FacetRangeParser(FacetParser parent, String key) {
-      super(parent, key);
-      facet = new FacetRange();
-    }
-
-    public FacetRange parse(Object arg) throws SyntaxError {
-      parseCommonParams(arg);
-
-      if (!(arg instanceof Map)) {
-        throw err("Missing range facet arguments");
-      }
-
-      Map<String, Object> m = (Map<String, Object>) arg;
-
-      facet.field = getString(m, "field", null);
-      facet.ranges = getVal(m, "ranges", false);
-
-      boolean required = facet.ranges == null;
-      facet.start = getVal(m, "start", required);
-      facet.end = getVal(m, "end", required);
-      facet.gap = getVal(m, "gap", required);
-      facet.hardend = getBoolean(m, "hardend", facet.hardend);
-      facet.mincount = getLong(m, "mincount", 0);
-
-      // TODO: refactor list-of-options code
-
-      List<String> list = getStringList(m, "include", false);
-      String[] includeList = null;
-      if (list != null) {
-        includeList = list.toArray(new String[list.size()]);
-      }
-      facet.include = FacetParams.FacetRangeInclude.parseParam( includeList );
-      facet.others = EnumSet.noneOf(FacetParams.FacetRangeOther.class);
-
-      List<String> other = getStringList(m, "other", false);
-      if (other != null) {
-        for (String otherStr : other) {
-          facet.others.add( FacetParams.FacetRangeOther.get(otherStr) );
-        }
-      }
-
-      Object facetObj = m.get("facet");
-      parseSubs(facetObj);
-
-      return facet;
-    }
-
-  }
 }
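
A usage sketch for the entry point that stays in this file: FacetRequest.parse(SolrQueryRequest,
Map<String,Object>) takes the already-decoded JSON facet structure as nested maps keyed by facet name.
The sketch below assumes an existing SolrQueryRequest named req and an illustrative "price" field;
neither the wrapper class nor the values come from the patch:

    // Hypothetical caller of the public FacetRequest.parse(...) declared above.
    class FacetParseExample {
      static org.apache.solr.search.facet.FacetRequest parsePriceRangeFacet(
          org.apache.solr.request.SolrQueryRequest req) {
        java.util.Map<String, Object> range = new java.util.LinkedHashMap<>();
        range.put("type", "range");
        range.put("field", "price");
        range.put("start", 0);
        range.put("end", 100);
        range.put("gap", 20);

        java.util.Map<String, Object> facets = new java.util.LinkedHashMap<>();
        facets.put("prices", range);   // facet name -> facet definition
        return org.apache.solr.search.facet.FacetRequest.parse(req, facets);
      }
    }

The parse(...) and parseOneFacetReq(...) entry points are unchanged by the patch; only the nested
FacetContext, FacetParser and FacetRangeParser classes are removed from this file.
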
 
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSorted.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSorted.java
new file mode 100644
index 0000000..ee67fca
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSorted.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search.facet;
+
+// Any type of facet request that generates a variable number of buckets
+// and the ability to sort by those generated buckets.
+abstract class FacetRequestSorted extends FacetRequest {
+  long offset;
+  long limit;
+  /**
+   * Number of buckets to request beyond the limit to do internally during initial distributed search.
+   * -1 means default heuristic.
+   */
+  int overrequest = -1;
+  /**
+   * Number of buckets to fill in beyond the limit to do internally during refinement of distributed search.
+   * -1 means default heuristic.
+   */
+  int overrefine = -1;
+  long mincount;
+  /**
+   * The basic sorting to do on buckets, defaults to {@link FacetRequest.FacetSort#COUNT_DESC}
+   * @see #prelim_sort
+   */
+  FacetSort sort;
+  /**
+   * An optional "Pre-Sort" that defaults to null.
+   * If specified, then the <code>prelim_sort</code> is used as an optimization in place of {@link #sort}
+   * during collection, and the full {@link #sort} values are only computed for the top candidate buckets
+   * (after refinement)
+   */
+  FacetSort prelim_sort;
+  RefineMethod refine; // null, NONE, or SIMPLE
+
+  @Override
+  public RefineMethod getRefineMethod() {
+    return refine;
+  }
+
+  @Override
+  public boolean returnsPartial() {
+    return super.returnsPartial() || (limit > 0);
+  }
+}
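
The fields in this new class line up with the request options of the bucket-producing facet types
(terms, range). As an illustrative terms-facet definition using the option names that mirror these
fields -- the "category"/"price" fields and the concrete values are hypothetical, not taken from the
patch:

    // Illustrative terms-facet options, annotated with the FacetRequestSorted field
    // that each one populates.
    java.util.Map<String, Object> terms = new java.util.LinkedHashMap<>();
    terms.put("type", "terms");
    terms.put("field", "category");
    terms.put("offset", 0);                                          // -> offset
    terms.put("limit", 10);                                          // -> limit
    terms.put("mincount", 1);                                        // -> mincount
    terms.put("overrequest", -1);                                    // -> overrequest (default heuristic)
    terms.put("overrefine", -1);                                     // -> overrefine (default heuristic)
    terms.put("facet", java.util.Map.of("avg_price", "avg(price)")); // stat referenced by the sort below
    terms.put("sort", "avg_price desc");                             // -> sort (full, more expensive sort)
    terms.put("prelim_sort", "count desc");                          // -> prelim_sort (cheap sort used during collection)
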
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java
index c1e5631..cdaa5f2 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequestSortedMerger.java
@@ -31,7 +31,7 @@ import java.util.Map;
 import org.apache.solr.common.util.SimpleOrderedMap;
 
 // base class for facets that create a list of buckets that can be sorted
-abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted> extends FacetBucketMerger<FacetRequestT> {
+abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted> extends FacetModule.FacetBucketMerger<FacetRequestT> {
   LinkedHashMap<Object,FacetBucket> buckets = new LinkedHashMap<>();
   List<FacetBucket> sortedBuckets;
   BitSet shardHasMoreBuckets;  // null, or "true" if we saw a result from this shard and it indicated that there are more results
@@ -44,6 +44,7 @@ abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted
   @Override
   public void merge(Object facetResult, Context mcontext) {
     this.mcontext = mcontext;
+    @SuppressWarnings({"rawtypes"})
     SimpleOrderedMap res = (SimpleOrderedMap)facetResult;
     Boolean more = (Boolean)res.get("more");
     if (more != null && more) {
@@ -57,16 +58,18 @@ abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted
 
   private static class SortVal implements Comparable<SortVal> {
     FacetBucket bucket;
-    FacetSortableMerger merger;  // make this class inner and access merger , direction in parent?
+    FacetModule.FacetSortableMerger merger;  // make this class inner and access merger , direction in parent?
     FacetRequest.SortDirection direction;
 
     @Override
+    @SuppressWarnings({"unchecked"})
     public int compareTo(SortVal o) {
       int c = -merger.compareTo(o.merger, direction) * direction.getMultiplier();
       return c == 0 ? bucket.bucketValue.compareTo(o.bucket.bucketValue) : c;
     }
   }
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void mergeBucketList(List<SimpleOrderedMap> bucketList, Context mcontext) {
     for (SimpleOrderedMap bucketRes : bucketList) {
       Comparable bucketVal = (Comparable)bucketRes.get("val");
@@ -80,6 +83,7 @@ abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted
   }
 
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void sortBuckets(final FacetRequest.FacetSort sort) {
     // NOTE: we *always* re-init from buckets, because it may have been modified post-refinement 
     sortedBuckets = new ArrayList<>( buckets.values() );
@@ -140,7 +144,7 @@ abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted
         if (merger != null) {
           SortVal sv = new SortVal();
           sv.bucket = bucket;
-          sv.merger = (FacetSortableMerger)merger;
+          sv.merger = (FacetModule.FacetSortableMerger)merger;
           sv.direction = direction;
           // sv.pos = i;  // if we need position in the future...
           lst.add(sv);
diff --git a/solr/core/src/java/org/apache/solr/search/facet/HLLAgg.java b/solr/core/src/java/org/apache/solr/search/facet/HLLAgg.java
index 222d4a3..665bdd9 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/HLLAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/HLLAgg.java
@@ -50,7 +50,7 @@ public class HLLAgg extends StrAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     SchemaField sf = fcontext.qcontext.searcher().getSchema().getField(getArg());
     if (sf.multiValued() || sf.getType().multiValuedFieldCache()) {
       if (sf.getType().isPointField()) {
@@ -75,7 +75,7 @@ public class HLLAgg extends StrAggValueSource {
     return new Merger();
   }
 
-  private static class Merger extends FacetSortableMerger {
+  private static class Merger extends FacetModule.FacetSortableMerger {
     HLL aggregate = null;
     long answer = -1; // -1 means unset
 
@@ -86,6 +86,9 @@ public class HLLAgg extends StrAggValueSource {
         return;
       }
 
+
+
+      @SuppressWarnings({"rawtypes"})
       SimpleOrderedMap map = (SimpleOrderedMap)facetResult;
       byte[] serialized = ((byte[])map.get("hll"));
       HLL subHLL = HLL.fromBytes(serialized);
@@ -109,7 +112,7 @@ public class HLLAgg extends StrAggValueSource {
     }
 
     @Override
-    public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
+    public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) {
       return Long.compare( getLong(), ((Merger)other).getLong() );
     }
   }
@@ -121,7 +124,7 @@ public class HLLAgg extends StrAggValueSource {
   abstract class BaseNumericAcc extends DocValuesAcc {
     HLL[] sets;
 
-    public BaseNumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
+    public BaseNumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
       super(fcontext, fcontext.qcontext.searcher().getSchema().getField(field));
       sets = new HLL[numSlots];
     }
@@ -160,6 +163,7 @@ public class HLLAgg extends StrAggValueSource {
       return set == null ? 0 : set.cardinality();
     }
 
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public Object getShardValue(int slot) throws IOException {
       HLL hll = sets[slot];
       if (hll == null) return NO_VALUES;
@@ -179,7 +183,7 @@ public class HLLAgg extends StrAggValueSource {
   class NumericAcc extends BaseNumericAcc {
     NumericDocValues values;
 
-    public NumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
+    public NumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
       super(fcontext, field, numSlots);
     }
 
@@ -205,7 +209,7 @@ public class HLLAgg extends StrAggValueSource {
   class SortedNumericAcc extends BaseNumericAcc {
     SortedNumericDocValues values;
 
-    public SortedNumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
+    public SortedNumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
       super(fcontext, field, numSlots);
     }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java b/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java
index 7c2bde4..fcba43c 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/LegacyFacet.java
@@ -270,6 +270,7 @@ public class LegacyFacet {
     getCurrentSubs().put(key, sub);
   }
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   private Map<String,Object> getCurrentSubs() {
     if (currentSubs == null) {
       currentSubs = new LinkedHashMap();
diff --git a/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java b/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java
index a9c6b44..828fbe6 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java
@@ -46,7 +46,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     ValueSource vs = getArg();
 
     SchemaField sf = null;
@@ -116,7 +116,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
   }
 
   // TODO: can this be replaced by ComparableMerger?
-  private class NumericMerger extends FacetDoubleMerger {
+  private class NumericMerger extends FacetModule.FacetDoubleMerger {
     double val = Double.NaN;
 
     @Override
@@ -133,9 +133,11 @@ public class MinMaxAgg extends SimpleAggValueSource {
     }
   }
 
-  private class ComparableMerger extends FacetSortableMerger {
+  private class ComparableMerger extends FacetModule.FacetSortableMerger {
+    @SuppressWarnings("rawtypes")
     Comparable val;
     @Override
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public void merge(Object facetResult, Context mcontext) {
       Comparable other = (Comparable)facetResult;
       if (val == null) {
@@ -153,7 +155,8 @@ public class MinMaxAgg extends SimpleAggValueSource {
     }
 
     @Override
-    public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
+    @SuppressWarnings({"unchecked"})
+    public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) {
       // NOTE: we don't use the minmax multiplier here because we still want natural ordering between slots (i.e. min(field) asc and max(field) asc both sort "A" before "Z")
       return this.val.compareTo(((ComparableMerger)other).val);
     }
@@ -164,7 +167,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
     private int currentSlot;
     int[] result;
 
-    public MinMaxUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public MinMaxUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots);
       result = new int[numSlots];
       Arrays.fill(result, MISSING);
@@ -233,8 +236,8 @@ public class MinMaxAgg extends SimpleAggValueSource {
     }
   }
 
-  class DFuncAcc extends DoubleFuncSlotAcc {
-    public DFuncAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
+  class DFuncAcc extends SlotAcc.DoubleFuncSlotAcc {
+    public DFuncAcc(ValueSource values, FacetContext fcontext, int numSlots) {
       super(values, fcontext, numSlots, Double.NaN);
     }
 
@@ -260,9 +263,9 @@ public class MinMaxAgg extends SimpleAggValueSource {
     }
   }
 
-  class LFuncAcc extends LongFuncSlotAcc {
+  class LFuncAcc extends SlotAcc.LongFuncSlotAcc {
     FixedBitSet exists;
-    public LFuncAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
+    public LFuncAcc(ValueSource values, FacetContext fcontext, int numSlots) {
       super(values, fcontext, numSlots, 0);
       exists = new FixedBitSet(numSlots);
     }
@@ -320,9 +323,9 @@ public class MinMaxAgg extends SimpleAggValueSource {
 
   }
 
-  class DateFuncAcc extends LongFuncSlotAcc {
+  class DateFuncAcc extends SlotAcc.LongFuncSlotAcc {
     private static final long MISSING = Long.MIN_VALUE;
-    public DateFuncAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
+    public DateFuncAcc(ValueSource values, FacetContext fcontext, int numSlots) {
       super(values, fcontext, numSlots, MISSING);
     }
 
@@ -351,7 +354,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
     SchemaField field;
     int[] slotOrd;
 
-    public OrdAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
+    public OrdAcc(FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
       super(fcontext);
       this.field = field;
       slotOrd = new int[numSlots];
@@ -394,7 +397,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
     LongValues toGlobal;
     SortedDocValues subDv;
 
-    public SingleValuedOrdAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
+    public SingleValuedOrdAcc(FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
       super(fcontext, field, numSlots);
     }
 
@@ -450,7 +453,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
     SortedSetDocValues subDv;
     long[] slotOrd;
 
-    public MinMaxSortedSetDVAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
+    public MinMaxSortedSetDVAcc(FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
       super(fcontext, field);
       this.slotOrd = new long[numSlots];
       Arrays.fill(slotOrd, MISSING);
diff --git a/solr/core/src/java/org/apache/solr/search/facet/MissingAgg.java b/solr/core/src/java/org/apache/solr/search/facet/MissingAgg.java
index 55e4d69..b1b630a 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/MissingAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/MissingAgg.java
@@ -37,7 +37,7 @@ public class MissingAgg extends SimpleAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     ValueSource vs = getArg();
 
     if (vs instanceof FieldNameValueSource) {
@@ -61,12 +61,12 @@ public class MissingAgg extends SimpleAggValueSource {
 
   @Override
   public FacetMerger createFacetMerger(Object prototype) {
-    return new FacetLongMerger();
+    return new FacetModule.FacetLongMerger();
   }
 
-  class MissingSlotAcc extends LongFuncSlotAcc {
+  class MissingSlotAcc extends SlotAcc.LongFuncSlotAcc {
 
-    public MissingSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
+    public MissingSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
       super(values, fcontext, numSlots, 0);
     }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/PercentileAgg.java b/solr/core/src/java/org/apache/solr/search/facet/PercentileAgg.java
index 3f855b1..b645b7f 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/PercentileAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/PercentileAgg.java
@@ -49,7 +49,7 @@ public class PercentileAgg extends SimpleAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     ValueSource vs = getArg();
 
     if (vs instanceof FieldNameValueSource) {
@@ -132,12 +132,12 @@ public class PercentileAgg extends SimpleAggValueSource {
     return lst;
   }
 
-  class Acc extends FuncSlotAcc {
+  class Acc extends SlotAcc.FuncSlotAcc {
     protected AVLTreeDigest[] digests;
     protected ByteBuffer buf;
     protected double[] sortvals;
 
-    public Acc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
+    public Acc(ValueSource values, FacetContext fcontext, int numSlots) {
       super(values, fcontext, numSlots);
       digests = new AVLTreeDigest[numSlots];
     }
@@ -220,7 +220,7 @@ public class PercentileAgg extends SimpleAggValueSource {
     protected ByteBuffer buf;
     double[] sortvals;
 
-    public BasePercentileDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public BasePercentileDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf);
       digests = new AVLTreeDigest[numSlots];
     }
@@ -289,7 +289,7 @@ public class PercentileAgg extends SimpleAggValueSource {
   class PercentileSortedNumericAcc extends BasePercentileDVAcc {
     SortedNumericDocValues values;
 
-    public PercentileSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public PercentileSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots);
     }
 
@@ -339,7 +339,7 @@ public class PercentileAgg extends SimpleAggValueSource {
   class PercentileSortedSetAcc extends BasePercentileDVAcc {
     SortedSetDocValues values;
 
-    public PercentileSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public PercentileSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots);
     }
 
@@ -376,7 +376,7 @@ public class PercentileAgg extends SimpleAggValueSource {
     protected double[] sortvals;
     private int currentSlot;
 
-    public PercentileUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public PercentileUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots);
       digests = new AVLTreeDigest[numSlots];
     }
@@ -465,7 +465,7 @@ public class PercentileAgg extends SimpleAggValueSource {
     }
   }
 
-  class Merger extends FacetSortableMerger {
+  class Merger extends FacetModule.FacetSortableMerger {
     protected AVLTreeDigest digest;
     protected Double sortVal;
 
@@ -488,7 +488,7 @@ public class PercentileAgg extends SimpleAggValueSource {
     }
 
     @Override
-    public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
+    public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) {
       return Double.compare(getSortVal(), ((Merger) other).getSortVal());
     }
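
A note for readers of the PercentileAgg hunks above: each slot holds its own AVLTreeDigest, and Merger combines per-shard digests rather than raw values. As a standalone sketch of the underlying t-digest estimate -- using the same AVLTreeDigest class this file already imports, with a compression value and demo class name chosen purely for illustration -- something like the following would print an estimate near 950:

import com.tdunning.math.stats.AVLTreeDigest;

public class TDigestPercentileDemo {
  public static void main(String[] args) {
    // compression trades accuracy for memory; 100 here is illustrative, not the value PercentileAgg uses
    AVLTreeDigest digest = new AVLTreeDigest(100);
    for (int i = 1; i <= 1000; i++) {
      digest.add(i);            // accumulate values, as Acc.collect() does per slot
    }
    // quantile() takes a fraction in [0,1]; 0.95 ~ the 95th percentile of 1..1000
    System.out.println(digest.quantile(0.95));
  }
}

Because digests are mergeable, the shard-level Merger above can combine them and read quantiles off the result without ever shipping raw values.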
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/RelatednessAgg.java b/solr/core/src/java/org/apache/solr/search/facet/RelatednessAgg.java
index c8153a9..a9a3d28 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/RelatednessAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/RelatednessAgg.java
@@ -117,12 +117,12 @@ public class RelatednessAgg extends AggValueSource {
   }
 
   @Override
-  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
+  public FunctionValues getValues(@SuppressWarnings("rawtypes") Map context, LeafReaderContext readerContext) throws IOException {
     throw new UnsupportedOperationException("NOT IMPLEMENTED " + name + " " + this);
   }
 
 
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     // TODO: Ideally this is where we should check fgQ/bgQ for 'null' and apply defaults...
     //
     // we want to walk up the fcontext and inherit the queries from any ancestor SKGAgg
@@ -135,7 +135,7 @@ public class RelatednessAgg extends AggValueSource {
     
     List<Query> fgFilters = new ArrayList<Query>(3);
     fgFilters.add(fgQ);
-    for (FacetRequest.FacetContext ctx = fcontext; ctx != null; ctx = ctx.parent) {
+    for (FacetContext ctx = fcontext; ctx != null; ctx = ctx.parent) {
       if (null != ctx.filter) {
         fgFilters.add(ctx.filter);
       } else {
@@ -170,7 +170,7 @@ public class RelatednessAgg extends AggValueSource {
     private final DocSet bgSet;
     private final long fgSize;
     private final long bgSize;
-    public SKGSlotAcc(final RelatednessAgg agg, final FacetRequest.FacetContext fcontext, final int numSlots,
+    public SKGSlotAcc(final RelatednessAgg agg, final FacetContext fcontext, final int numSlots,
                       final DocSet fgSet, final DocSet bgSet) throws IOException {
       super(fcontext);
       this.agg = agg;
@@ -253,6 +253,7 @@ public class RelatednessAgg extends AggValueSource {
         slotVal.incSizes(fgSize, bgSize);
       }
 
+      @SuppressWarnings({"rawtypes"})
       SimpleOrderedMap res = slotVal.externalize(fcontext.isShard());
       return res;
     }
@@ -403,6 +404,8 @@ public class RelatednessAgg extends AggValueSource {
      * @see SlotAcc#getValue
      * @see Merger#getMergedResult
      */
+
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public SimpleOrderedMap externalize(final boolean isShardRequest) {
       SimpleOrderedMap result = new SimpleOrderedMap<Number>();
       
@@ -429,7 +432,7 @@ public class RelatednessAgg extends AggValueSource {
   /**
    * Merges in the per shard {@link BucketData} output into a unified {@link BucketData}
    */
-  private static final class Merger extends FacetSortableMerger {
+  private static final class Merger extends FacetModule.FacetSortableMerger {
     private final BucketData mergedData;
     public Merger(final RelatednessAgg agg) {
       this.mergedData = new BucketData(agg);
@@ -437,13 +440,14 @@ public class RelatednessAgg extends AggValueSource {
     
     @Override
     public void merge(Object facetResult, Context mcontext) {
+      @SuppressWarnings({"unchecked"})
       NamedList<Object> shardData = (NamedList<Object>)facetResult;
       mergedData.incSizes((Long)shardData.remove(FG_SIZE), (Long)shardData.remove(BG_SIZE));
       mergedData.incCounts((Long)shardData.remove(FG_COUNT), (Long)shardData.remove(BG_COUNT));
     }
 
     @Override
-    public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
+    public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) {
       // NOTE: regardless of the SortDirection hint, we want normal comparison of the BucketData
       
       assert other instanceof Merger;
diff --git a/solr/core/src/java/org/apache/solr/search/facet/SimpleAggValueSource.java b/solr/core/src/java/org/apache/solr/search/facet/SimpleAggValueSource.java
index 661ec83..afc506c 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/SimpleAggValueSource.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/SimpleAggValueSource.java
@@ -35,6 +35,7 @@ public abstract class SimpleAggValueSource extends AggValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     throw new UnsupportedOperationException();
   }
diff --git a/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java b/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
index a241a66..2ab93cc 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/SlotAcc.java
@@ -44,11 +44,11 @@ import org.apache.solr.search.SolrIndexSearcher;
  */
 public abstract class SlotAcc implements Closeable {
   String key; // todo...
-  protected final FacetRequest.FacetContext fcontext;
+  protected final FacetContext fcontext;
   protected LeafReaderContext currentReaderContext;
   protected int currentDocBase;
 
-  public SlotAcc(FacetRequest.FacetContext fcontext) {
+  public SlotAcc(FacetContext fcontext) {
     this.fcontext = fcontext;
   }
 
@@ -68,28 +68,28 @@ public abstract class SlotAcc implements Closeable {
   }
 
   /**
-   * All subclasses must override this method to collect documents.  This method is called by the 
-   * default impl of {@link #collect(DocSet,int,IntFunction)} but it's also neccessary if this accumulator 
+   * All subclasses must override this method to collect documents.  This method is called by the
+   * default impl of {@link #collect(DocSet, int, IntFunction)} but it's also necessary if this accumulator
    * is used for sorting.
    *
-   * @param doc Single Segment docId (relative to the current {@link LeafReaderContext} to collect
-   * @param slot The slot number to collect this document in
-   * @param slotContext A callback that can be used for Accumulators that would like additional info 
-   *        about the current slot -- the {@link IntFunction} is only garunteed to be valid for 
-   *        the current slot, and the {@link SlotContext} returned is only valid for the duration 
-   *        of the <code>collect()</code> call.
+   * @param doc         Single Segment docId (relative to the current {@link LeafReaderContext}) to collect
+   * @param slot        The slot number to collect this document in
+   * @param slotContext A callback that can be used for Accumulators that would like additional info
+   *                    about the current slot -- the {@link IntFunction} is only guaranteed to be valid for
+   *                    the current slot, and the {@link SlotContext} returned is only valid for the duration
+   *                    of the <code>collect()</code> call.
    */
   public abstract void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException;
 
   /**
-   * Bulk collection of all documents in a slot.  The default implementation calls {@link #collect(int,int,IntFunction)}
+   * Bulk collection of all documents in a slot.  The default implementation calls {@link #collect(int, int, IntFunction)}
    *
-   * @param docs (global) Documents to collect
-   * @param slot The slot number to collect these documents in
-   * @param slotContext A callback that can be used for Accumulators that would like additional info 
-   *        about the current slot -- the {@link IntFunction} is only garunteed to be valid for 
-   *        the current slot, and the {@link SlotContext} returned is only valid for the duration 
-   *        of the <code>collect()</code> call.
+   * @param docs        (global) Documents to collect
+   * @param slot        The slot number to collect these documents in
+   * @param slotContext A callback that can be used for Accumulators that would like additional info
+   *                    about the current slot -- the {@link IntFunction} is only guaranteed to be valid for
+   *                    the current slot, and the {@link SlotContext} returned is only valid for the duration
+   *                    of the <code>collect()</code> call.
    */
   public int collect(DocSet docs, int slot, IntFunction<SlotContext> slotContext) throws IOException {
     int count = 0;
@@ -101,7 +101,7 @@ public abstract class SlotAcc implements Closeable {
     int segBase = 0;
     int segMax;
     int adjustedMax = 0;
-    for (DocIterator docsIt = docs.iterator(); docsIt.hasNext();) {
+    for (DocIterator docsIt = docs.iterator(); docsIt.hasNext(); ) {
       final int doc = docsIt.nextDoc();
       if (doc >= adjustedMax) {
         do {
@@ -135,16 +135,24 @@ public abstract class SlotAcc implements Closeable {
     }
   }
 
-  /** Called to reset the acc to a fresh state, ready for reuse */
+  /**
+   * Called to reset the acc to a fresh state, ready for reuse
+   */
   public abstract void reset() throws IOException;
 
-  /** Typically called from setNextReader to reset docValue iterators */
-  protected void resetIterators() throws IOException {};
+  /**
+   * Typically called from setNextReader to reset docValue iterators
+   */
+  protected void resetIterators() throws IOException {
+  }
 
   public abstract void resize(Resizer resizer);
 
   @Override
-  public void close() throws IOException {}
+  public void close() throws IOException {
+  }
 
   public static abstract class Resizer {
     public abstract int getNewSize();
@@ -206,7 +214,7 @@ public abstract class SlotAcc implements Closeable {
       FixedBitSet values = new FixedBitSet(getNewSize());
       int oldSize = old.length();
 
-      for(int oldSlot = 0;;) {
+      for (int oldSlot = 0; ; ) {
         oldSlot = values.nextSetBit(oldSlot);
         if (oldSlot == DocIdSetIterator.NO_MORE_DOCS) break;
         int newSlot = getNewSlot(oldSlot);
@@ -218,6 +226,7 @@ public abstract class SlotAcc implements Closeable {
     }
 
     public <T> T[] resize(T[] old, T defaultValue) {
+      @SuppressWarnings({"unchecked"})
       T[] values = (T[]) Array.newInstance(old.getClass().getComponentType(), getNewSize());
       if (defaultValue != null) {
         Arrays.fill(values, 0, values.length, defaultValue);
@@ -237,37 +246,39 @@ public abstract class SlotAcc implements Closeable {
   } // end class Resizer
 
   /**
-   * Incapsulates information about the current slot, for Accumulators that may want 
+   * Encapsulates information about the current slot, for Accumulators that may want
    * additional info during collection.
    */
   public static final class SlotContext {
     private final Query slotQuery;
+
     public SlotContext(Query slotQuery) {
       this.slotQuery = slotQuery;
     }
+
     public Query getSlotQuery() {
       return slotQuery;
     }
   }
-}
 
-// TODO: we should really have a decoupled value provider...
+
+  // TODO: we should really have a decoupled value provider...
 // This would enhance reuse and also prevent multiple lookups of same value across diff stats
-abstract class FuncSlotAcc extends SlotAcc {
-  protected final ValueSource valueSource;
-  protected FunctionValues values;
+  abstract static class FuncSlotAcc extends SlotAcc {
+    protected final ValueSource valueSource;
+    protected FunctionValues values;
 
-  public FuncSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
-    super(fcontext);
-    this.valueSource = values;
-  }
+    public FuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
+      super(fcontext);
+      this.valueSource = values;
+    }
 
-  @Override
-  public void setNextReader(LeafReaderContext readerContext) throws IOException {
-    super.setNextReader(readerContext);
-    values = valueSource.getValues(fcontext.qcontext, readerContext);
+    @Override
+    public void setNextReader(LeafReaderContext readerContext) throws IOException {
+      super.setNextReader(readerContext);
+      values = valueSource.getValues(fcontext.qcontext, readerContext);
+    }
   }
-}
 
 // have a version that counts the number of times a Slot has been hit? (for avg... what else?)
 
@@ -275,393 +286,395 @@ abstract class FuncSlotAcc extends SlotAcc {
 // double-slot-func -> func-slot -> slot -> acc
 // double-slot-func -> double-slot -> slot -> acc
 
-abstract class DoubleFuncSlotAcc extends FuncSlotAcc {
-  double[] result; // TODO: use DoubleArray
-  double initialValue;
+  abstract static class DoubleFuncSlotAcc extends FuncSlotAcc {
+    double[] result; // TODO: use DoubleArray
+    double initialValue;
 
-  public DoubleFuncSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
-    this(values, fcontext, numSlots, 0);
-  }
+    public DoubleFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
+      this(values, fcontext, numSlots, 0);
+    }
 
-  public DoubleFuncSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots, double initialValue) {
-    super(values, fcontext, numSlots);
-    this.initialValue = initialValue;
-    result = new double[numSlots];
-    if (initialValue != 0) {
-      reset();
+    public DoubleFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots, double initialValue) {
+      super(values, fcontext, numSlots);
+      this.initialValue = initialValue;
+      result = new double[numSlots];
+      if (initialValue != 0) {
+        reset();
+      }
     }
-  }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Double.compare(result[slotA], result[slotB]);
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Double.compare(result[slotA], result[slotB]);
+    }
 
-  @Override
-  public Object getValue(int slot) {
-    return result[slot];
-  }
+    @Override
+    public Object getValue(int slot) {
+      return result[slot];
+    }
 
-  @Override
-  public void reset() {
-    Arrays.fill(result, initialValue);
-  }
+    @Override
+    public void reset() {
+      Arrays.fill(result, initialValue);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
-    result = resizer.resize(result, initialValue);
+    @Override
+    public void resize(Resizer resizer) {
+      result = resizer.resize(result, initialValue);
+    }
   }
-}
 
-abstract class LongFuncSlotAcc extends FuncSlotAcc {
-  long[] result;
-  long initialValue;
+  abstract static class LongFuncSlotAcc extends FuncSlotAcc {
+    long[] result;
+    long initialValue;
 
-  public LongFuncSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots, long initialValue) {
-    super(values, fcontext, numSlots);
-    this.initialValue = initialValue;
-    result = new long[numSlots];
-    if (initialValue != 0) {
-      reset();
+    public LongFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots, long initialValue) {
+      super(values, fcontext, numSlots);
+      this.initialValue = initialValue;
+      result = new long[numSlots];
+      if (initialValue != 0) {
+        reset();
+      }
     }
-  }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Long.compare(result[slotA], result[slotB]);
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Long.compare(result[slotA], result[slotB]);
+    }
 
-  @Override
-  public Object getValue(int slot) {
-    return result[slot];
-  }
+    @Override
+    public Object getValue(int slot) {
+      return result[slot];
+    }
 
-  @Override
-  public void reset() {
-    Arrays.fill(result, initialValue);
-  }
+    @Override
+    public void reset() {
+      Arrays.fill(result, initialValue);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
-    result = resizer.resize(result, initialValue);
+    @Override
+    public void resize(Resizer resizer) {
+      result = resizer.resize(result, initialValue);
+    }
   }
-}
 
-abstract class IntSlotAcc extends SlotAcc {
-  int[] result; // use LongArray32
-  int initialValue;
+  abstract class IntSlotAcc extends SlotAcc {
+    int[] result; // use LongArray32
+    int initialValue;
 
-  public IntSlotAcc(FacetRequest.FacetContext fcontext, int numSlots, int initialValue) {
-    super(fcontext);
-    this.initialValue = initialValue;
-    result = new int[numSlots];
-    if (initialValue != 0) {
-      reset();
+    public IntSlotAcc(FacetContext fcontext, int numSlots, int initialValue) {
+      super(fcontext);
+      this.initialValue = initialValue;
+      result = new int[numSlots];
+      if (initialValue != 0) {
+        reset();
+      }
     }
-  }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Integer.compare(result[slotA], result[slotB]);
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Integer.compare(result[slotA], result[slotB]);
+    }
 
-  @Override
-  public Object getValue(int slot) {
-    return result[slot];
-  }
+    @Override
+    public Object getValue(int slot) {
+      return result[slot];
+    }
 
-  @Override
-  public void reset() {
-    Arrays.fill(result, initialValue);
-  }
+    @Override
+    public void reset() {
+      Arrays.fill(result, initialValue);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
-    result = resizer.resize(result, initialValue);
+    @Override
+    public void resize(Resizer resizer) {
+      result = resizer.resize(result, initialValue);
+    }
   }
-}
 
-class SumSlotAcc extends DoubleFuncSlotAcc {
-  public SumSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
-    super(values, fcontext, numSlots);
-  }
+  static class SumSlotAcc extends DoubleFuncSlotAcc {
+    public SumSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
+      super(values, fcontext, numSlots);
+    }
 
-  public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
-    double val = values.doubleVal(doc); // todo: worth trying to share this value across multiple stats that need it?
-    result[slotNum] += val;
+    public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
+      double val = values.doubleVal(doc); // todo: worth trying to share this value across multiple stats that need it?
+      result[slotNum] += val;
+    }
   }
-}
 
-class SumsqSlotAcc extends DoubleFuncSlotAcc {
-  public SumsqSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
-    super(values, fcontext, numSlots);
-  }
+  static class SumsqSlotAcc extends DoubleFuncSlotAcc {
+    public SumsqSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
+      super(values, fcontext, numSlots);
+    }
 
-  @Override
-  public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
-    double val = values.doubleVal(doc);
-    val = val * val;
-    result[slotNum] += val;
+    @Override
+    public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
+      double val = values.doubleVal(doc);
+      val = val * val;
+      result[slotNum] += val;
+    }
   }
-}
 
 
-class AvgSlotAcc extends DoubleFuncSlotAcc {
-  int[] counts;
+  static class AvgSlotAcc extends DoubleFuncSlotAcc {
+    int[] counts;
 
-  public AvgSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
-    super(values, fcontext, numSlots);
-    counts = new int[numSlots];
-  }
+    public AvgSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
+      super(values, fcontext, numSlots);
+      counts = new int[numSlots];
+    }
 
-  @Override
-  public void reset() {
-    super.reset();
-    for (int i = 0; i < counts.length; i++) {
-      counts[i] = 0;
+    @Override
+    public void reset() {
+      super.reset();
+      for (int i = 0; i < counts.length; i++) {
+        counts[i] = 0;
+      }
     }
-  }
 
-  @Override
-  public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
-    double val = values.doubleVal(doc);
-    if (val != 0 || values.exists(doc)) {
-      result[slotNum] += val;
-      counts[slotNum] += 1;
+    @Override
+    public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
+      double val = values.doubleVal(doc);
+      if (val != 0 || values.exists(doc)) {
+        result[slotNum] += val;
+        counts[slotNum] += 1;
+      }
     }
-  }
 
-  private double avg(int slot) {
-    return AggUtil.avg(result[slot], counts[slot]); // calc once and cache in result?
-  }
+    private double avg(int slot) {
+      return AggUtil.avg(result[slot], counts[slot]); // calc once and cache in result?
+    }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Double.compare(avg(slotA), avg(slotB));
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Double.compare(avg(slotA), avg(slotB));
+    }
 
-  @Override
-  public Object getValue(int slot) {
-    if (fcontext.isShard()) {
-      ArrayList lst = new ArrayList(2);
-      lst.add(counts[slot]);
-      lst.add(result[slot]);
-      return lst;
-    } else {
-      return avg(slot);
+    @Override
+    public Object getValue(int slot) {
+      if (fcontext.isShard()) {
+        ArrayList<Object> lst = new ArrayList<>(2);
+        lst.add(counts[slot]);
+        lst.add(result[slot]);
+        return lst;
+      } else {
+        return avg(slot);
+      }
     }
-  }
 
-  @Override
-  public void resize(Resizer resizer) {
-    super.resize(resizer);
-    counts = resizer.resize(counts, 0);
+    @Override
+    public void resize(Resizer resizer) {
+      super.resize(resizer);
+      counts = resizer.resize(counts, 0);
+    }
   }
-}
 
-class VarianceSlotAcc extends DoubleFuncSlotAcc {
-  int[] counts;
-  double[] sum;
+  static class VarianceSlotAcc extends DoubleFuncSlotAcc {
+    int[] counts;
+    double[] sum;
 
-  public VarianceSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
-    super(values, fcontext, numSlots);
-    counts = new int[numSlots];
-    sum = new double[numSlots];
-  }
+    public VarianceSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
+      super(values, fcontext, numSlots);
+      counts = new int[numSlots];
+      sum = new double[numSlots];
+    }
 
-  @Override
-  public void reset() {
-    super.reset();
-    Arrays.fill(counts, 0);
-    Arrays.fill(sum, 0);
-  }
+    @Override
+    public void reset() {
+      super.reset();
+      Arrays.fill(counts, 0);
+      Arrays.fill(sum, 0);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
-    super.resize(resizer);
-    this.counts = resizer.resize(this.counts, 0);
-    this.sum = resizer.resize(this.sum, 0);
-  }
+    @Override
+    public void resize(Resizer resizer) {
+      super.resize(resizer);
+      this.counts = resizer.resize(this.counts, 0);
+      this.sum = resizer.resize(this.sum, 0);
+    }
 
-  private double variance(int slot) {
-    return AggUtil.variance(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
-  }
+    private double variance(int slot) {
+      return AggUtil.variance(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
+    }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Double.compare(this.variance(slotA), this.variance(slotB));
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Double.compare(this.variance(slotA), this.variance(slotB));
+    }
 
-  @Override
-  public Object getValue(int slot) {
-    if (fcontext.isShard()) {
-      ArrayList lst = new ArrayList(3);
-      lst.add(counts[slot]);
-      lst.add(result[slot]);
-      lst.add(sum[slot]);
-      return lst;
-    } else {
-      return this.variance(slot);
+    @Override
+    public Object getValue(int slot) {
+      if (fcontext.isShard()) {
+        ArrayList<Object> lst = new ArrayList<>(3);
+        lst.add(counts[slot]);
+        lst.add(result[slot]);
+        lst.add(sum[slot]);
+        return lst;
+      } else {
+        return this.variance(slot);
+      }
     }
-  }
 
-  @Override
-  public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
-    double val = values.doubleVal(doc);
-    if (values.exists(doc)) {
-      counts[slot]++;
-      result[slot] += val * val;
-      sum[slot] += val;
+    @Override
+    public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
+      double val = values.doubleVal(doc);
+      if (values.exists(doc)) {
+        counts[slot]++;
+        result[slot] += val * val;
+        sum[slot] += val;
+      }
     }
   }
-}
 
-class StddevSlotAcc extends DoubleFuncSlotAcc {
-  int[] counts;
-  double[] sum;
+  static class StddevSlotAcc extends DoubleFuncSlotAcc {
+    int[] counts;
+    double[] sum;
 
-  public StddevSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
-    super(values, fcontext, numSlots);
-    counts = new int[numSlots];
-    sum = new double[numSlots];
-  }
+    public StddevSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
+      super(values, fcontext, numSlots);
+      counts = new int[numSlots];
+      sum = new double[numSlots];
+    }
 
-  @Override
-  public void reset() {
-    super.reset();
-    Arrays.fill(counts, 0);
-    Arrays.fill(sum, 0);
-  }
+    @Override
+    public void reset() {
+      super.reset();
+      Arrays.fill(counts, 0);
+      Arrays.fill(sum, 0);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
-    super.resize(resizer);
-    this.counts = resizer.resize(this.counts, 0);
-    this.result = resizer.resize(this.result, 0);
-  }
+    @Override
+    public void resize(Resizer resizer) {
+      super.resize(resizer);
+      this.counts = resizer.resize(this.counts, 0);
+      this.result = resizer.resize(this.result, 0);
+    }
 
-  private double stdDev(int slot) {
-    return AggUtil.stdDev(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
-  }
+    private double stdDev(int slot) {
+      return AggUtil.stdDev(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
+    }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Double.compare(this.stdDev(slotA), this.stdDev(slotB));
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Double.compare(this.stdDev(slotA), this.stdDev(slotB));
+    }
 
-  @Override
-  public Object getValue(int slot) {
-    if (fcontext.isShard()) {
-      ArrayList lst = new ArrayList(3);
-      lst.add(counts[slot]);
-      lst.add(result[slot]);
-      lst.add(sum[slot]);
-      return lst;
-    } else {
-      return this.stdDev(slot);
+    @Override
+    @SuppressWarnings({"unchecked"})
+    public Object getValue(int slot) {
+      if (fcontext.isShard()) {
+        ArrayList<Object> lst = new ArrayList<>(3);
+        lst.add(counts[slot]);
+        lst.add(result[slot]);
+        lst.add(sum[slot]);
+        return lst;
+      } else {
+        return this.stdDev(slot);
+      }
     }
-  }
 
-  @Override
-  public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
-    double val = values.doubleVal(doc);
-    if (values.exists(doc)) {
-      counts[slot]++;
-      result[slot] += val * val;
-      sum[slot] += val;
+    @Override
+    public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
+      double val = values.doubleVal(doc);
+      if (values.exists(doc)) {
+        counts[slot]++;
+        result[slot] += val * val;
+        sum[slot] += val;
+      }
     }
   }
-}
 
-abstract class CountSlotAcc extends SlotAcc {
-  public CountSlotAcc(FacetRequest.FacetContext fcontext) {
-    super(fcontext);
-  }
+  abstract static class CountSlotAcc extends SlotAcc {
+    public CountSlotAcc(FacetContext fcontext) {
+      super(fcontext);
+    }
 
-  public abstract void incrementCount(int slot, long count);
+    public abstract void incrementCount(int slot, long count);
 
-  public abstract long getCount(int slot);
-}
+    public abstract long getCount(int slot);
+  }
 
-class CountSlotArrAcc extends CountSlotAcc {
-  long[] result;
+  static class CountSlotArrAcc extends CountSlotAcc {
+    long[] result;
 
-  public CountSlotArrAcc(FacetRequest.FacetContext fcontext, int numSlots) {
-    super(fcontext);
-    result = new long[numSlots];
-  }
+    public CountSlotArrAcc(FacetContext fcontext, int numSlots) {
+      super(fcontext);
+      result = new long[numSlots];
+    }
 
-  @Override
-  public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) {
-    // TODO: count arrays can use fewer bytes based on the number of docs in
-    // the base set (that's the upper bound for single valued) - look at ttf?
-    result[slotNum]++;
-  }
+    @Override
+    public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) {
+      // TODO: count arrays can use fewer bytes based on the number of docs in
+      // the base set (that's the upper bound for single valued) - look at ttf?
+      result[slotNum]++;
+    }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Long.compare(result[slotA], result[slotB]);
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Long.compare(result[slotA], result[slotB]);
+    }
 
-  @Override
-  public Object getValue(int slotNum) throws IOException {
-    return result[slotNum];
-  }
+    @Override
+    public Object getValue(int slotNum) throws IOException {
+      return result[slotNum];
+    }
 
-  @Override
-  public void incrementCount(int slot, long count) {
-    result[slot] += count;
-  }
+    @Override
+    public void incrementCount(int slot, long count) {
+      result[slot] += count;
+    }
 
-  @Override
-  public long getCount(int slot) {
-    return result[slot];
-  }
+    @Override
+    public long getCount(int slot) {
+      return result[slot];
+    }
 
-  // internal and expert
-  long[] getCountArray() {
-    return result;
-  }
+    // internal and expert
+    long[] getCountArray() {
+      return result;
+    }
 
-  @Override
-  public void reset() {
-    Arrays.fill(result, 0);
-  }
+    @Override
+    public void reset() {
+      Arrays.fill(result, 0);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
-    result = resizer.resize(result, 0);
+    @Override
+    public void resize(Resizer resizer) {
+      result = resizer.resize(result, 0);
+    }
   }
-}
 
-class SortSlotAcc extends SlotAcc {
-  public SortSlotAcc(FacetRequest.FacetContext fcontext) {
-    super(fcontext);
-  }
+  static class SortSlotAcc extends SlotAcc {
+    public SortSlotAcc(FacetContext fcontext) {
+      super(fcontext);
+    }
 
-  @Override
-  public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
-    // no-op
-  }
+    @Override
+    public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
+      // no-op
+    }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return slotA - slotB;
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return slotA - slotB;
+    }
 
-  @Override
-  public Object getValue(int slotNum) {
-    return slotNum;
-  }
+    @Override
+    public Object getValue(int slotNum) {
+      return slotNum;
+    }
 
-  @Override
-  public void reset() {
-    // no-op
-  }
+    @Override
+    public void reset() {
+      // no-op
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
-    // sort slot only works with direct-mapped accumulators
-    throw new UnsupportedOperationException();
+    @Override
+    public void resize(Resizer resizer) {
+      // sort slot only works with direct-mapped accumulators
+      throw new UnsupportedOperationException();
+    }
   }
 }
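
The SlotAcc hunks above define the per-slot accumulator contract (segment-local docIds passed to collect(), compare()/getValue() per slot, reset()/resize() for reuse) and pull the concrete accumulators in as static nested classes. A minimal custom accumulator written against that contract might look like the sketch below; the class name EvenDocCountSlotAcc and its even-docId counting are invented for illustration, while SlotAcc, FacetContext, SlotContext and Resizer are the types shown in this diff, so the sketch assumes it lives in the same package:

package org.apache.solr.search.facet;

import java.io.IOException;
import java.util.Arrays;
import java.util.function.IntFunction;

/** Counts, per slot, how many collected documents had an even segment-local docId. */
class EvenDocCountSlotAcc extends SlotAcc {
  private long[] counts;

  EvenDocCountSlotAcc(FacetContext fcontext, int numSlots) {
    super(fcontext);
    counts = new long[numSlots];
  }

  @Override
  public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
    // 'doc' is segment-relative, per the collect() Javadoc above
    if ((doc & 1) == 0) counts[slot]++;
  }

  @Override
  public int compare(int slotA, int slotB) {
    return Long.compare(counts[slotA], counts[slotB]);
  }

  @Override
  public Object getValue(int slotNum) {
    return counts[slotNum];
  }

  @Override
  public void reset() {
    Arrays.fill(counts, 0);
  }

  @Override
  public void resize(Resizer resizer) {
    counts = resizer.resize(counts, 0);
  }
}

The accumulators this commit nests inside SlotAcc (SumSlotAcc, CountSlotArrAcc, SortSlotAcc, and friends) follow the same shape, which is why the file now reads as one class with many static members.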
diff --git a/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java b/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java
index 4f9c9b8..368a62f 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/StddevAgg.java
@@ -32,7 +32,7 @@ public class StddevAgg extends SimpleAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     ValueSource vs = getArg();
 
     if (vs instanceof FieldNameValueSource) {
@@ -57,7 +57,7 @@ public class StddevAgg extends SimpleAggValueSource {
       }
       vs = sf.getType().getValueSource(sf, null);
     }
-    return new StddevSlotAcc(vs, fcontext, numSlots);
+    return new SlotAcc.StddevSlotAcc(vs, fcontext, numSlots);
   }
 
   @Override
@@ -65,7 +65,7 @@ public class StddevAgg extends SimpleAggValueSource {
     return new Merger();
   }
 
-  private static class Merger extends FacetDoubleMerger {
+  private static class Merger extends FacetModule.FacetDoubleMerger {
     long count;
     double sumSq;
     double sum;
@@ -90,9 +90,9 @@ public class StddevAgg extends SimpleAggValueSource {
     }    
   }
 
-  class StddevSortedNumericAcc extends SDVSortedNumericAcc {
+  class StddevSortedNumericAcc extends DocValuesAcc.SDVSortedNumericAcc {
 
-    public StddevSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public StddevSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots);
     }
 
@@ -102,9 +102,9 @@ public class StddevAgg extends SimpleAggValueSource {
     }
   }
 
-  class StddevSortedSetAcc extends SDVSortedSetAcc {
+  class StddevSortedSetAcc extends DocValuesAcc.SDVSortedSetAcc {
 
-    public StddevSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public StddevSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots);
     }
 
@@ -114,9 +114,9 @@ public class StddevAgg extends SimpleAggValueSource {
     }
   }
 
-  class StddevUnInvertedFieldAcc extends SDVUnInvertedFieldAcc {
+  class StddevUnInvertedFieldAcc extends UnInvertedFieldAcc.SDVUnInvertedFieldAcc {
 
-    public StddevUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public StddevUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots);
     }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/SumAgg.java b/solr/core/src/java/org/apache/solr/search/facet/SumAgg.java
index 065faf6..05f5476 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/SumAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/SumAgg.java
@@ -34,7 +34,7 @@ public class SumAgg extends SimpleAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     ValueSource vs = getArg();
 
     if (vs instanceof FieldNameValueSource) {
@@ -59,7 +59,7 @@ public class SumAgg extends SimpleAggValueSource {
       }
       vs = sf.getType().getValueSource(sf, null);
     }
-    return new SumSlotAcc(vs, fcontext, numSlots);
+    return new SlotAcc.SumSlotAcc(vs, fcontext, numSlots);
   }
 
   @Override
@@ -67,7 +67,7 @@ public class SumAgg extends SimpleAggValueSource {
     return new Merger();
   }
 
-  public static class Merger extends FacetDoubleMerger {
+  public static class Merger extends FacetModule.FacetDoubleMerger {
     double val;
 
     @Override
@@ -80,9 +80,9 @@ public class SumAgg extends SimpleAggValueSource {
     }
   }
 
-  class SumSortedNumericAcc extends DoubleSortedNumericDVAcc {
+  class SumSortedNumericAcc extends DocValuesAcc.DoubleSortedNumericDVAcc {
 
-    public SumSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public SumSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots, 0);
     }
 
@@ -95,9 +95,9 @@ public class SumAgg extends SimpleAggValueSource {
 
   }
 
-  class SumSortedSetAcc extends DoubleSortedSetDVAcc {
+  class SumSortedSetAcc extends DocValuesAcc.DoubleSortedSetDVAcc {
 
-    public SumSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public SumSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots, 0);
     }
 
@@ -113,9 +113,9 @@ public class SumAgg extends SimpleAggValueSource {
     }
   }
 
-  class SumUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc {
+  class SumUnInvertedFieldAcc extends UnInvertedFieldAcc.DoubleUnInvertedFieldAcc {
 
-    public SumUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public SumUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots, 0);
     }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/SumsqAgg.java b/solr/core/src/java/org/apache/solr/search/facet/SumsqAgg.java
index d892179..12eec86 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/SumsqAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/SumsqAgg.java
@@ -33,7 +33,7 @@ public class SumsqAgg extends SimpleAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     ValueSource vs = getArg();
 
     if (vs instanceof FieldNameValueSource) {
@@ -58,7 +58,7 @@ public class SumsqAgg extends SimpleAggValueSource {
       }
       vs = sf.getType().getValueSource(sf, null);
     }
-    return new SumsqSlotAcc(vs, fcontext, numSlots);
+    return new SlotAcc.SumsqSlotAcc(vs, fcontext, numSlots);
   }
 
   @Override
@@ -66,9 +66,9 @@ public class SumsqAgg extends SimpleAggValueSource {
     return new SumAgg.Merger();
   }
 
-  class SumSqSortedNumericAcc extends DoubleSortedNumericDVAcc {
+  class SumSqSortedNumericAcc extends DocValuesAcc.DoubleSortedNumericDVAcc {
 
-    public SumSqSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public SumSqSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots, 0);
     }
 
@@ -81,9 +81,9 @@ public class SumsqAgg extends SimpleAggValueSource {
     }
   }
 
-  class SumSqSortedSetAcc extends DoubleSortedSetDVAcc {
+  class SumSqSortedSetAcc extends DocValuesAcc.DoubleSortedSetDVAcc {
 
-    public SumSqSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public SumSqSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots, 0);
     }
 
@@ -99,9 +99,9 @@ public class SumsqAgg extends SimpleAggValueSource {
     }
   }
 
-  class SumSqUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc {
+  class SumSqUnInvertedFieldAcc extends UnInvertedFieldAcc.DoubleUnInvertedFieldAcc {
 
-    public SumSqUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public SumSqUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots, 0);
     }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java
index f0dd7df..04f88f9 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedField.java
@@ -315,7 +315,7 @@ public class UnInvertedField extends DocTermOrds {
 
 
 
-  private void getCounts(FacetFieldProcessorByArrayUIF processor, CountSlotAcc counts) throws IOException {
+  private void getCounts(FacetFieldProcessorByArrayUIF processor, SlotAcc.CountSlotAcc counts) throws IOException {
     DocSet docs = processor.fcontext.base;
     int baseSize = docs.size();
     int maxDoc = searcher.maxDoc();
@@ -427,7 +427,7 @@ public class UnInvertedField extends DocTermOrds {
     DocSet docs = processor.fcontext.base;
 
     int uniqueTerms = 0;
-    final CountSlotAcc countAcc = processor.countAcc;
+    final SlotAcc.CountSlotAcc countAcc = processor.countAcc;
 
     for (TopTerm tt : bigTerms.values()) {
       if (tt.termNum >= startTermIndex && tt.termNum < endTermIndex) {
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java
index 5f662d7..2096773 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java
@@ -36,7 +36,7 @@ public abstract class UnInvertedFieldAcc extends SlotAcc implements UnInvertedFi
   UnInvertedField.DocToTerm docToTerm;
   SchemaField sf;
 
-  public UnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+  public UnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
     super(fcontext);
     this.sf = sf;
     uif = UnInvertedField.getUnInvertedField(sf.getName(), fcontext.qcontext.searcher());
@@ -51,109 +51,111 @@ public abstract class UnInvertedFieldAcc extends SlotAcc implements UnInvertedFi
       docToTerm = null;
     }
   }
-}
 
-abstract class DoubleUnInvertedFieldAcc extends UnInvertedFieldAcc {
-  double[] result;
-  int currentSlot;
-  double initialValue;
 
-  public DoubleUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException {
-    super(fcontext, sf, numSlots);
-    result = new double[numSlots];
-    if (initialValue != 0) {
-      this.initialValue = initialValue;
-      Arrays.fill(result, initialValue);
+  abstract static class DoubleUnInvertedFieldAcc extends UnInvertedFieldAcc {
+    double[] result;
+    int currentSlot;
+    double initialValue;
+
+    public DoubleUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException {
+      super(fcontext, sf, numSlots);
+      result = new double[numSlots];
+      if (initialValue != 0) {
+        this.initialValue = initialValue;
+        Arrays.fill(result, initialValue);
+      }
     }
-  }
 
-  @Override
-  public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
-    this.currentSlot = slot;
-    docToTerm.getBigTerms(doc + currentDocBase, this);
-    docToTerm.getSmallTerms(doc + currentDocBase, this);
-  }
+    @Override
+    public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
+      this.currentSlot = slot;
+      docToTerm.getBigTerms(doc + currentDocBase, this);
+      docToTerm.getSmallTerms(doc + currentDocBase, this);
+    }
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Double.compare(result[slotA], result[slotB]);
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Double.compare(result[slotA], result[slotB]);
+    }
 
-  @Override
-  public Object getValue(int slotNum) throws IOException {
-    return result[slotNum];
-  }
+    @Override
+    public Object getValue(int slotNum) throws IOException {
+      return result[slotNum];
+    }
 
-  @Override
-  public void reset() throws IOException {
-    Arrays.fill(result, initialValue);
-  }
+    @Override
+    public void reset() throws IOException {
+      Arrays.fill(result, initialValue);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
+    @Override
+    public void resize(Resizer resizer) {
     this.result = resizer.resize(result, initialValue);
+    }
   }
-}
 
-/**
- * Base accumulator to compute standard deviation and variance for uninvertible fields
- */
-abstract class SDVUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc {
-  int[] counts;
-  double[] sum;
-
-  public SDVUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
-    super(fcontext, sf, numSlots, 0);
-    this.counts = new int[numSlots];
-    this.sum = new double[numSlots];
-  }
+  /**
+   * Base accumulator to compute standard deviation and variance for uninvertible fields
+   */
+  abstract static class SDVUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc {
+    int[] counts;
+    double[] sum;
+
+    public SDVUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+      super(fcontext, sf, numSlots, 0);
+      this.counts = new int[numSlots];
+      this.sum = new double[numSlots];
+    }
 
-  @Override
-  public void call(int termNum) {
-    try {
-      BytesRef term = docToTerm.lookupOrd(termNum);
-      Object obj = sf.getType().toObject(sf, term);
-      double val = obj instanceof Date ? ((Date)obj).getTime(): ((Number)obj).doubleValue();
-      result[currentSlot] += val * val;
-      sum[currentSlot]+= val;
-      counts[currentSlot]++;
-    } catch (IOException e) {
-      // find a better way to do it
-      throw new UncheckedIOException(e);
+    @Override
+    public void call(int termNum) {
+      try {
+        BytesRef term = docToTerm.lookupOrd(termNum);
+        Object obj = sf.getType().toObject(sf, term);
+        double val = obj instanceof Date ? ((Date) obj).getTime() : ((Number) obj).doubleValue();
+        result[currentSlot] += val * val;
+        sum[currentSlot] += val;
+        counts[currentSlot]++;
+      } catch (IOException e) {
+        // find a better way to do it
+        throw new UncheckedIOException(e);
+      }
     }
-  }
 
-  protected abstract double computeVal(int slot);
+    protected abstract double computeVal(int slot);
 
-  @Override
-  public int compare(int slotA, int slotB) {
-    return Double.compare(computeVal(slotA), computeVal(slotB));
-  }
+    @Override
+    public int compare(int slotA, int slotB) {
+      return Double.compare(computeVal(slotA), computeVal(slotB));
+    }
 
-  @Override
-  public Object getValue(int slot) {
-    if (fcontext.isShard()) {
-      ArrayList lst = new ArrayList(3);
-      lst.add(counts[slot]);
-      lst.add(result[slot]);
-      lst.add(sum[slot]);
-      return lst;
-    } else {
-      return computeVal(slot);
+    @Override
+    @SuppressWarnings({"unchecked", "rawtypes"})
+    public Object getValue(int slot) {
+      if (fcontext.isShard()) {
+        ArrayList lst = new ArrayList(3);
+        lst.add(counts[slot]);
+        lst.add(result[slot]);
+        lst.add(sum[slot]);
+        return lst;
+      } else {
+        return computeVal(slot);
+      }
     }
-  }
 
-  @Override
-  public void reset() throws IOException {
-    super.reset();
-    Arrays.fill(counts, 0);
-    Arrays.fill(sum, 0);
-  }
+    @Override
+    public void reset() throws IOException {
+      super.reset();
+      Arrays.fill(counts, 0);
+      Arrays.fill(sum, 0);
+    }
 
-  @Override
-  public void resize(Resizer resizer) {
-    super.resize(resizer);
+    @Override
+    public void resize(Resizer resizer) {
+      super.resize(resizer);
     this.counts = resizer.resize(counts, 0);
     this.sum = resizer.resize(sum, 0);
+    }
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java
index c6c520c..7f476d6 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java
@@ -42,7 +42,7 @@ public class UniqueAgg extends StrAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     SchemaField sf = fcontext.qcontext.searcher().getSchema().getField(getArg());
     if (sf.multiValued() || sf.getType().multiValuedFieldCache()) {
       if (sf.getType().isPointField()) {
@@ -66,7 +66,7 @@ public class UniqueAgg extends StrAggValueSource {
     return new Merger();
   }
 
-  private static class Merger extends FacetSortableMerger {
+  private static class Merger extends FacetModule.FacetSortableMerger {
     long answer = -1;
     long sumUnique;
     Set<Object> values;
@@ -75,6 +75,7 @@ public class UniqueAgg extends StrAggValueSource {
     long shardsMissingMax;
 
     @Override
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public void merge(Object facetResult, Context mcontext) {
       SimpleOrderedMap map = (SimpleOrderedMap)facetResult;
       long unique = ((Number)map.get(UNIQUE)).longValue();
@@ -117,7 +118,7 @@ public class UniqueAgg extends StrAggValueSource {
     }
 
     @Override
-    public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
+    public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) {
       return Long.compare( getLong(), ((Merger)other).getLong() );
     }
   }
@@ -126,7 +127,7 @@ public class UniqueAgg extends StrAggValueSource {
   static abstract class BaseNumericAcc extends DocValuesAcc {
     LongSet[] sets;
 
-    public BaseNumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
+    public BaseNumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
       super(fcontext, fcontext.qcontext.searcher().getSchema().getField(field));
       sets = new LongSet[numSlots];
     }
@@ -177,6 +178,7 @@ public class UniqueAgg extends StrAggValueSource {
       return set == null ? 0 : set.cardinality();
     }
 
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public Object getShardValue(int slot) throws IOException {
       LongSet set = sets[slot];
       int unique = getCardinality(slot);
@@ -212,7 +214,7 @@ public class UniqueAgg extends StrAggValueSource {
   static class NumericAcc extends BaseNumericAcc {
     NumericDocValues values;
 
-    public NumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
+    public NumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
       super(fcontext, field, numSlots);
     }
 
@@ -235,7 +237,7 @@ public class UniqueAgg extends StrAggValueSource {
   static class SortedNumericAcc extends BaseNumericAcc {
     SortedNumericDocValues values;
 
-    public SortedNumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
+    public SortedNumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
       super(fcontext, field, numSlots);
     }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java
index 860cf28..efa8dbf 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java
@@ -27,7 +27,7 @@ public abstract class UniqueBlockAgg extends UniqueAgg {
 
     protected int[] lastSeenValuesPerSlot;
 
-    protected UniqueBlockSlotAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots)
+    protected UniqueBlockSlotAcc(FacetContext fcontext, SchemaField field, int numSlots)
         throws IOException { //
       super(fcontext, field, /*numSlots suppressing inherited accumulator */0, null);
       counts = new int[numSlots];
@@ -79,10 +79,10 @@ public abstract class UniqueBlockAgg extends UniqueAgg {
   }
 
   @Override
-  public abstract SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException ;
+  public abstract SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException ;
   
   @Override
   public FacetMerger createFacetMerger(Object prototype) {
-    return new FacetLongMerger() ;
+    return new FacetModule.FacetLongMerger() ;
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockFieldAgg.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockFieldAgg.java
index de4476a..fddb32d 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockFieldAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockFieldAgg.java
@@ -27,7 +27,7 @@ public class UniqueBlockFieldAgg extends UniqueBlockAgg {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     final String fieldName = getArg();
     SchemaField sf = fcontext.qcontext.searcher().getSchema().getField(fieldName);
     if (sf.multiValued() || sf.getType().multiValuedFieldCache()) {
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java
index e635012..885725d 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java
@@ -34,7 +34,7 @@ public class UniqueBlockQueryAgg extends UniqueBlockAgg {
     private Query query;
     private BitSet parentBitSet;
 
-    private UniqueBlockQuerySlotAcc(FacetRequest.FacetContext fcontext, Query query, int numSlots)
+    private UniqueBlockQuerySlotAcc(FacetContext fcontext, Query query, int numSlots)
         throws IOException { //
       super(fcontext, null, numSlots);
       this.query = query;
@@ -65,7 +65,7 @@ public class UniqueBlockQueryAgg extends UniqueBlockAgg {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     return new UniqueBlockQuerySlotAcc(fcontext, query, numSlots);
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueMultiDvSlotAcc.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueMultiDvSlotAcc.java
index 9596fbb..839fc52 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UniqueMultiDvSlotAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueMultiDvSlotAcc.java
@@ -36,7 +36,7 @@ class UniqueMultiDvSlotAcc extends UniqueSlotAcc {
   LongValues toGlobal;
   SortedSetDocValues subDv;
 
-  public UniqueMultiDvSlotAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
+  public UniqueMultiDvSlotAcc(FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
     super(fcontext, field, numSlots, factory);
   }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueMultivaluedSlotAcc.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueMultivaluedSlotAcc.java
index a993248..508da38 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UniqueMultivaluedSlotAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueMultivaluedSlotAcc.java
@@ -29,7 +29,7 @@ class UniqueMultivaluedSlotAcc extends UniqueSlotAcc implements UnInvertedField.
   private UnInvertedField uif;
   private UnInvertedField.DocToTerm docToTerm;
 
-  public UniqueMultivaluedSlotAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
+  public UniqueMultivaluedSlotAcc(FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
     super(fcontext, field, numSlots, factory);
     SolrIndexSearcher searcher = fcontext.qcontext.searcher();
     uif = UnInvertedField.getUnInvertedField(field.getName(), searcher);
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueSinglevaluedSlotAcc.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueSinglevaluedSlotAcc.java
index adbc29a..6057dd0 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UniqueSinglevaluedSlotAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueSinglevaluedSlotAcc.java
@@ -36,7 +36,7 @@ class UniqueSinglevaluedSlotAcc extends UniqueSlotAcc {
   LongValues toGlobal;
   SortedDocValues subDv;
 
-  public UniqueSinglevaluedSlotAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
+  public UniqueSinglevaluedSlotAcc(FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
     super(fcontext, field, numSlots, factory);
   }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java
index 0ef10ed..0bb62da 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java
@@ -35,7 +35,7 @@ abstract class UniqueSlotAcc extends SlotAcc {
   int[] counts;  // populated with the cardinality once
   int nTerms;
 
-  public UniqueSlotAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
+  public UniqueSlotAcc(FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
     super(fcontext);
     this.factory = factory;
     arr = new FixedBitSet[numSlots];
@@ -74,6 +74,7 @@ abstract class UniqueSlotAcc extends SlotAcc {
     return res;
   }
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   private Object getShardHLL(int slot) throws IOException {
     FixedBitSet ords = arr[slot];
     if (ords == null) return HLLAgg.NO_VALUES;
@@ -97,6 +98,7 @@ abstract class UniqueSlotAcc extends SlotAcc {
     return map;
   }
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   private Object getShardValue(int slot) throws IOException {
     if (factory != null) return getShardHLL(slot);
     FixedBitSet ords = arr[slot];
diff --git a/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java b/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java
index e815c40..fa12be6 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/VarianceAgg.java
@@ -31,7 +31,7 @@ public class VarianceAgg extends SimpleAggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     ValueSource vs = getArg();
 
     if (vs instanceof FieldNameValueSource) {
@@ -56,7 +56,7 @@ public class VarianceAgg extends SimpleAggValueSource {
       }
       vs = sf.getType().getValueSource(sf, null);
     }
-    return new VarianceSlotAcc(vs, fcontext, numSlots);
+    return new SlotAcc.VarianceSlotAcc(vs, fcontext, numSlots);
   }
 
   @Override
@@ -64,7 +64,7 @@ public class VarianceAgg extends SimpleAggValueSource {
     return new Merger();
   }
 
-  private static class Merger extends FacetDoubleMerger {
+  private static class Merger extends FacetModule.FacetDoubleMerger {
     long count;
     double sumSq;
     double sum;
@@ -89,9 +89,9 @@ public class VarianceAgg extends SimpleAggValueSource {
     }    
   }
 
-  class VarianceSortedNumericAcc extends SDVSortedNumericAcc {
+  class VarianceSortedNumericAcc extends DocValuesAcc.SDVSortedNumericAcc {
 
-    public VarianceSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public VarianceSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots);
     }
 
@@ -101,9 +101,9 @@ public class VarianceAgg extends SimpleAggValueSource {
     }
   }
 
-  class VarianceSortedSetAcc extends SDVSortedSetAcc {
+  class VarianceSortedSetAcc extends DocValuesAcc.SDVSortedSetAcc {
 
-    public VarianceSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public VarianceSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots);
     }
 
@@ -113,9 +113,9 @@ public class VarianceAgg extends SimpleAggValueSource {
     }
   }
 
-  class VarianceUnInvertedFieldAcc extends SDVUnInvertedFieldAcc {
+  class VarianceUnInvertedFieldAcc extends UnInvertedFieldAcc.SDVUnInvertedFieldAcc {
 
-    public VarianceUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
+    public VarianceUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
       super(fcontext, sf, numSlots);
     }
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java
index 876c750..03318be 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSnapshotCloudManager.java
@@ -258,10 +258,10 @@ public class TestSnapshotCloudManager extends SolrCloudTestCase {
     if (!treeOne.equals(treeTwo)) {
       List<String> t1 = new ArrayList<>(treeOne);
       t1.removeAll(treeTwo);
-      log.warn("Only in tree one: " + t1);
+      log.warn("Only in tree one: {}", t1);
       List<String> t2 = new ArrayList<>(treeTwo);
       t2.removeAll(treeOne);
-      log.warn("Only in tree two: " + t2);
+      log.warn("Only in tree two: {}", t2);
     }
     assertEquals(treeOne, treeTwo);
     for (String path : treeOne) {
diff --git a/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java b/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java
index 484b487..320b0a0 100644
--- a/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java
+++ b/solr/core/src/test/org/apache/solr/response/JSONWriterTest.java
@@ -211,7 +211,7 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
     methodsExpectedNotOverriden.add("public default void org.apache.solr.common.util.JsonTextWriter.writeIterator(org.apache.solr.common.IteratorWriter) throws java.io.IOException");
     methodsExpectedNotOverriden.add("public default void org.apache.solr.common.util.JsonTextWriter.writeJsonIter(java.util.Iterator) throws java.io.IOException");
 
-    final Class<?> subClass = ArrayOfNameTypeValueJSONWriter.class;
+    final Class<?> subClass = JSONResponseWriter.ArrayOfNameTypeValueJSONWriter.class;
     final Class<?> superClass = subClass.getSuperclass();
 
     List<Method> allSuperClassMethods = new ArrayList<>();
@@ -256,7 +256,7 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
 
   @Test
   public void testArrntvWriterLacksMethodsOfItsOwn() {
-    final Class<?> subClass = ArrayOfNameTypeValueJSONWriter.class;
+    final Class<?> subClass = JSONResponseWriter.ArrayOfNameTypeValueJSONWriter.class;
     final Class<?> superClass = subClass.getSuperclass();
     // ArrayOfNamedValuePairJSONWriter is a simple sub-class
     // which should have (almost) no methods of its own
diff --git a/solr/core/src/test/org/apache/solr/search/facet/DebugAgg.java b/solr/core/src/test/org/apache/solr/search/facet/DebugAgg.java
index fb972dc..0f63bf8 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/DebugAgg.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/DebugAgg.java
@@ -68,7 +68,7 @@ class DebugAgg extends AggValueSource {
   }
 
   @Override
-  public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
+  public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
     return new Acc(fcontext, numDocs, numSlots, inner.createSlotAcc(fcontext, numDocs, numSlots));
   }
 
@@ -94,7 +94,7 @@ class DebugAgg extends AggValueSource {
     public long numDocs;
     public int numSlots;
 
-    public Acc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots, SlotAcc sub) {
+    public Acc(FacetContext fcontext, long numDocs, int numSlots, SlotAcc sub) {
       super(fcontext);
       this.last = this;
       this.numDocs = numDocs;
@@ -173,7 +173,7 @@ class DebugAgg extends AggValueSource {
     }
     
     @Override
-    public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) {
+    public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) {
       return new NumShardsAcc(fcontext, numDocs, numSlots);
     }
     
@@ -188,7 +188,7 @@ class DebugAgg extends AggValueSource {
     }
     
     public static class NumShardsAcc extends SlotAcc {
-      public NumShardsAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) {
+      public NumShardsAcc(FacetContext fcontext, long numDocs, int numSlots) {
         super(fcontext);
       }
       
@@ -221,7 +221,7 @@ class DebugAgg extends AggValueSource {
     
     @Override
     public FacetMerger createFacetMerger(Object prototype) {
-      return new FacetLongMerger();
+      return new FacetModule.FacetLongMerger();
     }
     
   }
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
index 386aea0..2a5dcd1 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
@@ -136,7 +136,7 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
     try {
       int nShards = responsesAndTests.length / 2;
       Object jsonFacet = Utils.fromJSONString(facet);
-      FacetRequest.FacetParser parser = new FacetRequest.FacetTopParser(req);
+      FacetParser parser = new FacetRequest.FacetTopParser(req);
       FacetRequest facetRequest = parser.parse(jsonFacet);
 
       FacetMerger merger = null;


[lucene-solr] 24/47: DOAP changes for release 8.5.2

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit b93a1cd98674ea2ca3759ce25dad0e9ed627f391
Author: Mike Drob <md...@apple.com>
AuthorDate: Tue May 26 16:58:52 2020 -0500

    DOAP changes for release 8.5.2
---
 dev-tools/doap/lucene.rdf | 7 +++++++
 dev-tools/doap/solr.rdf   | 7 +++++++
 2 files changed, 14 insertions(+)

diff --git a/dev-tools/doap/lucene.rdf b/dev-tools/doap/lucene.rdf
index 7a7b20d..1cdf552 100644
--- a/dev-tools/doap/lucene.rdf
+++ b/dev-tools/doap/lucene.rdf
@@ -69,6 +69,13 @@
     <!-- NOTE: please insert releases in numeric order, NOT chronologically. -->
    <release>
        <Version>
+         <name>lucene-8.5.2</name>
+         <created>2020-05-26</created>
+         <revision>8.5.2</revision>
+       </Version>
+   </release>
+   <release>
+       <Version>
          <name>lucene-8.5.1</name>
          <created>2020-04-16</created>
          <revision>8.5.1</revision>
diff --git a/dev-tools/doap/solr.rdf b/dev-tools/doap/solr.rdf
index 30da70b..d3e097f 100644
--- a/dev-tools/doap/solr.rdf
+++ b/dev-tools/doap/solr.rdf
@@ -69,6 +69,13 @@
     <!-- NOTE: please insert releases in numeric order, NOT chronologically. -->
     <release>
          <Version>
+           <name>solr-8.5.2</name>
+           <created>2020-05-26</created>
+           <revision>8.5.2</revision>
+         </Version>
+    </release>
+    <release>
+         <Version>
            <name>solr-8.5.1</name>
            <created>2020-04-16</created>
            <revision>8.5.1</revision>


[lucene-solr] 41/47: SOLR-14494: Refactor BlockJoin to not use Filter (#1523)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 0f545d784e4b6404af3807e588ccc39bdbc56f6e
Author: David Smiley <ds...@apache.org>
AuthorDate: Fri May 29 21:44:44 2020 -0400

    SOLR-14494: Refactor BlockJoin to not use Filter (#1523)
    
    Note: henceforth the perSegFilter cache will internally have values of type BitSetProducer instead of Filter.
---
 .../solr/search/facet/UniqueBlockQueryAgg.java     |   4 +-
 .../solr/search/join/BlockJoinChildQParser.java    |   7 +-
 .../solr/search/join/BlockJoinParentQParser.java   | 105 ++++++++++-----------
 .../search/join/ChildFieldValueSourceParser.java   |   4 +-
 .../join/another/BJQFilterAccessibleTest.java      |   2 +-
 5 files changed, 56 insertions(+), 66 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java
index 885725d..0df743d 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockQueryAgg.java
@@ -25,7 +25,7 @@ import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BitSet;
 
-import static org.apache.solr.search.join.BlockJoinParentQParser.getCachedFilter;
+import static org.apache.solr.search.join.BlockJoinParentQParser.getCachedBitSetProducer;
 
 public class UniqueBlockQueryAgg extends UniqueBlockAgg {
 
@@ -42,7 +42,7 @@ public class UniqueBlockQueryAgg extends UniqueBlockAgg {
 
     @Override
     public void setNextReader(LeafReaderContext readerContext) throws IOException {
-      this.parentBitSet = getCachedFilter(fcontext.req, query).getFilter().getBitSet(readerContext);
+      this.parentBitSet = getCachedBitSetProducer(fcontext.req, query).getBitSet(readerContext);
     }
 
     @Override
diff --git a/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java b/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java
index 2c005ac..b24e3e4 100644
--- a/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java
@@ -23,7 +23,6 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.ToChildBlockJoinQuery;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.search.SolrConstantScoreQuery;
 import org.apache.solr.search.SyntaxError;
 
 public class BlockJoinChildQParser extends BlockJoinParentQParser {
@@ -34,7 +33,7 @@ public class BlockJoinChildQParser extends BlockJoinParentQParser {
 
   @Override
   protected Query createQuery(Query parentListQuery, Query query, String scoreMode) {
-    return new ToChildBlockJoinQuery(query, getFilter(parentListQuery).getFilter());
+    return new ToChildBlockJoinQuery(query, getBitSetProducer(parentListQuery));
   }
 
   @Override
@@ -49,8 +48,6 @@ public class BlockJoinChildQParser extends BlockJoinParentQParser {
         .add(new MatchAllDocsQuery(), Occur.MUST)
         .add(parents, Occur.MUST_NOT)
       .build();
-    SolrConstantScoreQuery wrapped = new SolrConstantScoreQuery(getFilter(notParents));
-    wrapped.setCache(false);
-    return wrapped;
+    return new BitSetProducerQuery(getBitSetProducer(notParents));
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/join/BlockJoinParentQParser.java b/solr/core/src/java/org/apache/solr/search/join/BlockJoinParentQParser.java
index 416c9f3..151062f 100644
--- a/solr/core/src/java/org/apache/solr/search/join/BlockJoinParentQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/join/BlockJoinParentQParser.java
@@ -20,22 +20,25 @@ import java.io.IOException;
 import java.util.Objects;
 
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.ConstantScoreScorer;
+import org.apache.lucene.search.ConstantScoreWeight;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryVisitor;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.search.join.QueryBitSetProducer;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.lucene.search.join.ToParentBlockJoinQuery;
-import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.BitSet;
-import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.BitSetIterator;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.search.BitsFilteredDocIdSet;
-import org.apache.solr.search.Filter;
+import org.apache.solr.search.ExtendedQueryBase;
 import org.apache.solr.search.QParser;
 import org.apache.solr.search.SolrCache;
-import org.apache.solr.search.SolrConstantScoreQuery;
 import org.apache.solr.search.SyntaxError;
 
 public class BlockJoinParentQParser extends FiltersQParser {
@@ -71,42 +74,26 @@ public class BlockJoinParentQParser extends FiltersQParser {
 
   @Override
   protected Query noClausesQuery() throws SyntaxError {
-    SolrConstantScoreQuery wrapped = new SolrConstantScoreQuery(getFilter(parseParentFilter()));
-    wrapped.setCache(false);
-    return wrapped;
+    return new BitSetProducerQuery(getBitSetProducer(parseParentFilter()));
   }
 
   protected Query createQuery(final Query parentList, Query query, String scoreMode) throws SyntaxError {
-    return new AllParentsAware(query, getFilter(parentList).filter, ScoreModeParser.parse(scoreMode), parentList);
+    return new AllParentsAware(query, getBitSetProducer(parentList), ScoreModeParser.parse(scoreMode), parentList);
   }
 
-  BitDocIdSetFilterWrapper getFilter(Query parentList) {
-    return getCachedFilter(req, parentList);
+  BitSetProducer getBitSetProducer(Query query) {
+    return getCachedBitSetProducer(req, query);
   }
 
-  public static BitDocIdSetFilterWrapper getCachedFilter(final SolrQueryRequest request, Query parentList) {
+  public static BitSetProducer getCachedBitSetProducer(final SolrQueryRequest request, Query query) {
     @SuppressWarnings("unchecked")
-    SolrCache<Query, Filter> parentCache = request.getSearcher().getCache(CACHE_NAME);
+    SolrCache<Query, BitSetProducer> parentCache = request.getSearcher().getCache(CACHE_NAME);
     // lazily retrieve from solr cache
-    BitDocIdSetFilterWrapper result;
     if (parentCache != null) {
-      Filter filter = parentCache.computeIfAbsent(parentList,
-          query -> new BitDocIdSetFilterWrapper(createParentFilter(query)));
-      if (filter instanceof BitDocIdSetFilterWrapper) {
-        result = (BitDocIdSetFilterWrapper) filter;
-      } else {
-        result = new BitDocIdSetFilterWrapper(createParentFilter(parentList));
-        // non-atomic update of existing entry to ensure strong-typing
-        parentCache.put(parentList, result);
-      }
+      return parentCache.computeIfAbsent(query, QueryBitSetProducer::new);
     } else {
-      result = new BitDocIdSetFilterWrapper(createParentFilter(parentList));
+      return new QueryBitSetProducer(query);
     }
-    return result;
-  }
-
-  private static BitSetProducer createParentFilter(Query parentQ) {
-    return new QueryBitSetProducer(parentQ);
   }
 
   static final class AllParentsAware extends ToParentBlockJoinQuery {
@@ -123,49 +110,55 @@ public class BlockJoinParentQParser extends FiltersQParser {
     }
   }
 
-  // We need this wrapper since BitDocIdSetFilter does not extend Filter
-  public static class BitDocIdSetFilterWrapper extends Filter {
+  /** A constant score query based on a {@link BitSetProducer}. */
+  static class BitSetProducerQuery extends ExtendedQueryBase {
 
-    private final BitSetProducer filter;
+    final BitSetProducer bitSetProducer;
 
-    BitDocIdSetFilterWrapper(BitSetProducer filter) {
-      this.filter = filter;
+    public BitSetProducerQuery(BitSetProducer bitSetProducer) {
+      this.bitSetProducer = bitSetProducer;
+      setCache(false); // because we assume the bitSetProducer is itself cached
     }
 
     @Override
-    public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
-      BitSet set = filter.getBitSet(context);
-      if (set == null) {
-        return null;
-      }
-      return BitsFilteredDocIdSet.wrap(new BitDocIdSet(set), acceptDocs);
+    public String toString(String field) {
+      return getClass().getSimpleName() + "(" + bitSetProducer + ")";
     }
 
-    public BitSetProducer getFilter() {
-      return filter;
+    @Override
+    public boolean equals(Object other) {
+      return sameClassAs(other) && Objects.equals(bitSetProducer, getClass().cast(other).bitSetProducer);
     }
 
     @Override
-    public String toString(String field) {
-      return getClass().getSimpleName() + "(" + filter + ")";
+    public int hashCode() {
+      return classHash() + bitSetProducer.hashCode();
     }
 
     @Override
-    public boolean equals(Object other) {
-      return sameClassAs(other) &&
-             Objects.equals(filter, getClass().cast(other).getFilter());
+    public void visit(QueryVisitor visitor) {
+      visitor.visitLeaf(this);
     }
 
     @Override
-    public int hashCode() {
-      return classHash() + filter.hashCode();
+    public Weight createWeight(IndexSearcher searcher, org.apache.lucene.search.ScoreMode scoreMode, float boost) throws IOException {
+      return new ConstantScoreWeight(BitSetProducerQuery.this, boost) {
+        @Override
+        public Scorer scorer(LeafReaderContext context) throws IOException {
+          BitSet bitSet = bitSetProducer.getBitSet(context);
+          if (bitSet == null) {
+            return null;
+          }
+          DocIdSetIterator disi = new BitSetIterator(bitSet, bitSet.approximateCardinality());
+          return new ConstantScoreScorer(this, boost, scoreMode, disi);
+        }
+
+        @Override
+        public boolean isCacheable(LeafReaderContext ctx) {
+          return getCache();
+        }
+      };
     }
   }
 
 }
-
-
-
-
-
-
diff --git a/solr/core/src/java/org/apache/solr/search/join/ChildFieldValueSourceParser.java b/solr/core/src/java/org/apache/solr/search/join/ChildFieldValueSourceParser.java
index 768c7e3..30be3a3 100644
--- a/solr/core/src/java/org/apache/solr/search/join/ChildFieldValueSourceParser.java
+++ b/solr/core/src/java/org/apache/solr/search/join/ChildFieldValueSourceParser.java
@@ -180,8 +180,8 @@ public class ChildFieldValueSourceParser extends ValueSourceParser {
       }
       bjQ = (AllParentsAware) query;
       
-      parentFilter = BlockJoinParentQParser.getCachedFilter(fp.getReq(), bjQ.getParentQuery()).getFilter();
-      childFilter = BlockJoinParentQParser.getCachedFilter(fp.getReq(), bjQ.getChildQuery()).getFilter();
+      parentFilter = BlockJoinParentQParser.getCachedBitSetProducer(fp.getReq(), bjQ.getParentQuery());
+      childFilter = BlockJoinParentQParser.getCachedBitSetProducer(fp.getReq(), bjQ.getChildQuery());
 
       if (sortFieldName==null || sortFieldName.equals("")) {
         throw new SyntaxError ("field is omitted in "+fp.getString());
diff --git a/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java b/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java
index 96ac205..d5044e6 100644
--- a/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java
+++ b/solr/core/src/test/org/apache/solr/search/join/another/BJQFilterAccessibleTest.java
@@ -46,7 +46,7 @@ public class BJQFilterAccessibleTest  extends SolrTestCaseJ4 {
       TermQuery childQuery = new TermQuery(new Term("child_s", "l"));
       Query parentQuery = new WildcardQuery(new Term("parent_s", "*"));
       ToParentBlockJoinQuery tpbjq = new ToParentBlockJoinQuery(childQuery,
-          BlockJoinParentQParser.getCachedFilter(req,parentQuery).getFilter(), ScoreMode.Max);
+          BlockJoinParentQParser.getCachedBitSetProducer(req,parentQuery), ScoreMode.Max);
       Assert.assertEquals(6, req.getSearcher().search(tpbjq,10).totalHits.value);
     }
   }
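
For illustration, a minimal sketch of the refactored API shape described in this commit: obtaining the cached BitSetProducer for a parent filter and feeding it to a ToParentBlockJoinQuery, mirroring the updated BJQFilterAccessibleTest above. The request and the field names are assumptions made only for the example.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.join.BlockJoinParentQParser;

class BitSetProducerUsageSketch {
  // req is assumed to be a live request; field names mirror BJQFilterAccessibleTest.
  static Query childrenJoinedToParents(SolrQueryRequest req) {
    Query parentQuery = new WildcardQuery(new Term("parent_s", "*"));
    Query childQuery  = new TermQuery(new Term("child_s", "l"));
    // The perSegFilter cache (when configured) now hands back a BitSetProducer directly,
    // instead of a Filter wrapper that had to be unwrapped via getFilter().
    BitSetProducer parents = BlockJoinParentQParser.getCachedBitSetProducer(req, parentQuery);
    return new ToParentBlockJoinQuery(childQuery, parents, ScoreMode.Max);
  }
}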


[lucene-solr] 03/47: SOLR-14472: missed CHANGES.txt

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit e0e50b9ebaf7b2d75f830f761ac259f7100366d1
Author: David Smiley <ds...@salesforce.com>
AuthorDate: Mon May 18 17:45:55 2020 -0400

    SOLR-14472: missed CHANGES.txt
---
 solr/CHANGES.txt | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 3351970..2f878d0 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -135,6 +135,9 @@ Optimizations
   value of matching documents in the response (numFound) will be an approximation.
   (Ishan Chattopadhyaya, Munendra S N, Tomás Fernández Löbbe)
 
+* SOLR-14472: Autoscaling "cores" preference now retrieves the core count more efficiently, and counts all cores.
+  (David Smiley)
+
 Bug Fixes
 ---------------------
 * SOLR-13264: IndexSizeTrigger aboveOp / belowOp properties not in valid properties.


[lucene-solr] 08/47: SOLR-14484: avoid putting null into MDC

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit cd6b9b93ac5a2498bc84219d64ad28f7836a7b8e
Author: David Smiley <ds...@salesforce.com>
AuthorDate: Wed May 20 09:46:15 2020 -0400

    SOLR-14484: avoid putting null into MDC
    Co-authored-by: Andras Salamon
---
 .../apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java
index ea0f773..0883e7f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java
@@ -329,7 +329,7 @@ public class ConcurrentUpdateHttp2SolrClient extends SolrClient {
 
   // *must* be called with runners monitor held, e.g. synchronized(runners){ addRunner() }
   private void addRunner() {
-    MDC.put("ConcurrentUpdateHttp2SolrClient.url", client.getBaseURL());
+    MDC.put("ConcurrentUpdateHttp2SolrClient.url", String.valueOf(client.getBaseURL())); // MDC can't have null value
     try {
       Runner r = new Runner();
       runners.add(r);
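
As the code comment above notes, MDC should not be handed a null value. A minimal sketch of the guard, assuming (as the fix implies) that the URL passed in can be null; String.valueOf(Object) maps a null reference to the literal string "null", so MDC.put never receives null.

import org.slf4j.MDC;

class MdcNullGuardSketch {
  // baseUrl may be null; String.valueOf turns it into the string "null" before it reaches MDC.
  static void putClientUrl(String baseUrl) {
    MDC.put("ConcurrentUpdateHttp2SolrClient.url", String.valueOf(baseUrl));
  }
}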


[lucene-solr] 18/47: SOLR-13325: Add a collection selector to ComputePlanAction (#1512)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit a00d7eb4399c2e90182a4deeac28e015a0eaac92
Author: Shalin Shekhar Mangar <sh...@apache.org>
AuthorDate: Fri May 22 10:36:27 2020 +0530

    SOLR-13325: Add a collection selector to ComputePlanAction (#1512)
    
    ComputePlanAction now supports a collection selector of the form `collections: {policy: my_policy}` which can be used to select multiple collections that match collection property/value pairs. This is useful to maintain a whitelist of collections for which actions should be taken without needing to hard-code the collection names. The collection hints are pushed down to the policy engine so operations for non-matching collections are not computed at all. The AutoAddReplicasPlanAction n [...]
---
 solr/CHANGES.txt                                   |   6 +
 .../autoscaling/AutoAddReplicasPlanAction.java     |  47 ++----
 .../solr/cloud/autoscaling/ComputePlanAction.java  | 167 +++++++++++++--------
 .../autoscaling/AutoAddReplicasPlanActionTest.java |  12 +-
 .../cloud/autoscaling/ComputePlanActionTest.java   |  80 ++++++----
 .../src/solrcloud-autoscaling-trigger-actions.adoc |  71 ++++++++-
 6 files changed, 245 insertions(+), 138 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 2629407..51c0fec 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -119,6 +119,12 @@ Improvements
 
 * SOLR-14407: Handle shards.purpose in the postlogs tool (Joel Bernstein)
 
+* SOLR-13325: ComputePlanAction now supports a collection selector of the form `collections: {policy: my_policy}`
+  which can be used to select multiple collections that match collection property/value pairs. This is useful to
+  maintain a whitelist of collections for which actions are taken without needing to hard code the collection names
+  themselves. The collection hints are pushed down to the policy engine so operations for non-matching collections
+  are not computed at all. (ab, shalin)
+
 Optimizations
 ---------------------
 * SOLR-8306: Do not collect expand documents when expand.rows=0 (Marshall Sanders, Amelia Henderson)
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanAction.java
index fdd3474..d129fdb 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanAction.java
@@ -18,46 +18,23 @@
 package org.apache.solr.cloud.autoscaling;
 
 
-import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
 
-import org.apache.solr.client.solrj.cloud.autoscaling.NoneSuggester;
-import org.apache.solr.client.solrj.cloud.autoscaling.Policy;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
-import org.apache.solr.client.solrj.cloud.autoscaling.Suggester;
-import org.apache.solr.client.solrj.impl.ClusterStateProvider;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.core.SolrResourceLoader;
 
+import static org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS;
+
+/**
+ * This class configures the parent ComputePlanAction to compute plan
+ * only for collections which have autoAddReplicas=true.
+ */
 public class AutoAddReplicasPlanAction extends ComputePlanAction {
 
   @Override
-  protected Suggester getSuggester(Policy.Session session, TriggerEvent event, ActionContext context, SolrCloudManager cloudManager) throws IOException {
-    // for backward compatibility
-    ClusterStateProvider stateProvider = cloudManager.getClusterStateProvider();
-    String autoAddReplicas = stateProvider.getClusterProperty(ZkStateReader.AUTO_ADD_REPLICAS, (String) null);
-    if (autoAddReplicas != null && autoAddReplicas.equals("false")) {
-      return NoneSuggester.get(session);
-    }
-
-    Suggester suggester = super.getSuggester(session, event, context, cloudManager);
-    ClusterState clusterState;
-    try {
-      clusterState = stateProvider.getClusterState();
-    } catch (IOException e) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Exception getting cluster state", e);
-    }
-
-    boolean anyCollections = false;
-    for (DocCollection collection: clusterState.getCollectionsMap().values()) {
-      if (collection.getAutoAddReplicas()) {
-        anyCollections = true;
-        suggester.hint(Suggester.Hint.COLL, collection.getName());
-      }
-    }
-
-    if (!anyCollections) return NoneSuggester.get(session);
-    return suggester;
+  public void configure(SolrResourceLoader loader, SolrCloudManager cloudManager, Map<String, Object> properties) throws TriggerValidationException {
+    properties.put("collections", Collections.singletonMap(AUTO_ADD_REPLICAS, "true"));
+    super.configure(loader, cloudManager, properties);
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
index 87dd0c3..fad45e0 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
@@ -17,38 +17,28 @@
 
 package org.apache.solr.cloud.autoscaling;
 
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
-import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
-import org.apache.solr.client.solrj.cloud.autoscaling.NoneSuggester;
-import org.apache.solr.client.solrj.cloud.autoscaling.Policy;
-import org.apache.solr.client.solrj.cloud.autoscaling.PolicyHelper;
-import org.apache.solr.client.solrj.cloud.autoscaling.Suggester;
-import org.apache.solr.client.solrj.cloud.autoscaling.UnsupportedSuggester;
+import org.apache.solr.client.solrj.cloud.autoscaling.*;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.params.AutoScalingParams;
 import org.apache.solr.common.params.CollectionParams;
-import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.util.Pair;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.core.SolrResourceLoader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.*;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+
 import static org.apache.solr.cloud.autoscaling.TriggerEvent.NODE_NAMES;
 
 /**
@@ -61,7 +51,8 @@ import static org.apache.solr.cloud.autoscaling.TriggerEvent.NODE_NAMES;
 public class ComputePlanAction extends TriggerActionBase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  Set<String> collections = new HashSet<>();
+  // accept all collections by default
+  Predicate<String> collectionsPredicate = s -> true;
 
   public ComputePlanAction() {
     super();
@@ -72,9 +63,37 @@ public class ComputePlanAction extends TriggerActionBase {
   @Override
   public void configure(SolrResourceLoader loader, SolrCloudManager cloudManager, Map<String, Object> properties) throws TriggerValidationException {
     super.configure(loader, cloudManager, properties);
-    String colString = (String) properties.get("collections");
-    if (colString != null && !colString.isEmpty()) {
-      collections.addAll(StrUtils.splitSmart(colString, ','));
+
+    Object value = properties.get("collections");
+    if (value instanceof String) {
+      String colString = (String) value;
+      if (!colString.isEmpty()) {
+        List<String> whiteListedCollections = StrUtils.splitSmart(colString, ',');
+        collectionsPredicate = whiteListedCollections::contains;
+      }
+    } else if (value instanceof Map) {
+      Map<String, String> matchConditions = (Map<String, String>) value;
+      collectionsPredicate = collectionName -> {
+        try {
+          DocCollection collection = cloudManager.getClusterStateProvider().getCollection(collectionName);
+          if (collection == null) {
+            log.debug("Collection: {} was not found while evaluating conditions", collectionName);
+            return false;
+          }
+          for (Map.Entry<String, String> entry : matchConditions.entrySet()) {
+            if (!entry.getValue().equals(collection.get(entry.getKey()))) {
+              if (log.isDebugEnabled()) {
+                log.debug("Collection: {} does not match condition: {}:{}", collectionName, entry.getKey(), entry.getValue());
+              }
+              return false;
+            }
+          }
+          return true;
+        } catch (IOException e) {
+          log.error("Exception fetching collection information for: {}", collectionName, e);
+          return false;
+        }
+      };
     }
   }
 
@@ -142,14 +161,6 @@ public class ComputePlanAction extends TriggerActionBase {
           if (log.isDebugEnabled()) {
             log.debug("Computed Plan: {}", operation.getParams());
           }
-          if (!collections.isEmpty()) {
-            String coll = operation.getParams().get(CoreAdminParams.COLLECTION);
-            if (coll != null && !collections.contains(coll)) {
-              // discard an op that doesn't affect our collections
-              log.debug("-- discarding due to collection={} not in {}", coll, collections);
-              continue;
-            }
-          }
           Map<String, Object> props = context.getProperties();
           props.compute("operations", (k, v) -> {
             List<SolrRequest> operations = (List<SolrRequest>) v;
@@ -217,29 +228,7 @@ public class ComputePlanAction extends TriggerActionBase {
         suggester = getNodeAddedSuggester(cloudManager, session, event);
         break;
       case NODELOST:
-        String preferredOp = (String) event.getProperty(AutoScalingParams.PREFERRED_OP, CollectionParams.CollectionAction.MOVEREPLICA.toLower());
-        CollectionParams.CollectionAction action = CollectionParams.CollectionAction.get(preferredOp);
-        switch (action) {
-          case MOVEREPLICA:
-            suggester = session.getSuggester(action)
-                .hint(Suggester.Hint.SRC_NODE, event.getProperty(NODE_NAMES));
-            break;
-          case DELETENODE:
-            int start = (Integer)event.getProperty(START, 0);
-            List<String> srcNodes = (List<String>) event.getProperty(NODE_NAMES);
-            if (srcNodes.isEmpty() || start >= srcNodes.size()) {
-              return NoneSuggester.get(session);
-            }
-            String sourceNode = srcNodes.get(start);
-            suggester = session.getSuggester(action)
-                .hint(Suggester.Hint.SRC_NODE, Collections.singletonList(sourceNode));
-            event.getProperties().put(START, ++start);
-            break;
-          case NONE:
-            return NoneSuggester.get(session);
-          default:
-            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unsupported preferredOperation: " + action.toLower() + " specified for node lost trigger");
-        }
+        suggester = getNodeLostSuggester(cloudManager, session, event);
         break;
       case SEARCHRATE:
       case METRIC:
@@ -258,13 +247,15 @@ public class ComputePlanAction extends TriggerActionBase {
         for (Map.Entry<Suggester.Hint, Object> e : op.getHints().entrySet()) {
           suggester = suggester.hint(e.getKey(), e.getValue());
         }
+        if (applyCollectionHints(cloudManager, suggester) == 0) return NoneSuggester.get(session);
         suggester = suggester.forceOperation(true);
         event.getProperties().put(START, ++start);
         break;
       case SCHEDULED:
-        preferredOp = (String) event.getProperty(AutoScalingParams.PREFERRED_OP, CollectionParams.CollectionAction.MOVEREPLICA.toLower());
-        action = CollectionParams.CollectionAction.get(preferredOp);
+        String preferredOp = (String) event.getProperty(AutoScalingParams.PREFERRED_OP, CollectionParams.CollectionAction.MOVEREPLICA.toLower());
+        CollectionParams.CollectionAction action = CollectionParams.CollectionAction.get(preferredOp);
         suggester = session.getSuggester(action);
+        if (applyCollectionHints(cloudManager, suggester) == 0) return NoneSuggester.get(session);
         break;
       default:
         throw new UnsupportedOperationException("No support for events other than nodeAdded, nodeLost, searchRate, metric, scheduled and indexSize. Received: " + event.getEventType());
@@ -272,6 +263,53 @@ public class ComputePlanAction extends TriggerActionBase {
     return suggester;
   }
 
+  private Suggester getNodeLostSuggester(SolrCloudManager cloudManager, Policy.Session session, TriggerEvent event) throws IOException {
+    String preferredOp = (String) event.getProperty(AutoScalingParams.PREFERRED_OP, CollectionParams.CollectionAction.MOVEREPLICA.toLower());
+    CollectionParams.CollectionAction action = CollectionParams.CollectionAction.get(preferredOp);
+    switch (action) {
+      case MOVEREPLICA:
+        Suggester s = session.getSuggester(action)
+                .hint(Suggester.Hint.SRC_NODE, event.getProperty(NODE_NAMES));
+        if (applyCollectionHints(cloudManager, s) == 0) return NoneSuggester.get(session);
+        return s;
+      case DELETENODE:
+        int start = (Integer)event.getProperty(START, 0);
+        List<String> srcNodes = (List<String>) event.getProperty(NODE_NAMES);
+        if (srcNodes.isEmpty() || start >= srcNodes.size()) {
+          return NoneSuggester.get(session);
+        }
+        String sourceNode = srcNodes.get(start);
+        s = session.getSuggester(action)
+                .hint(Suggester.Hint.SRC_NODE, event.getProperty(NODE_NAMES));
+        if (applyCollectionHints(cloudManager, s) == 0) return NoneSuggester.get(session);
+        s.hint(Suggester.Hint.SRC_NODE, Collections.singletonList(sourceNode));
+        event.getProperties().put(START, ++start);
+        return s;
+      case NONE:
+        return NoneSuggester.get(session);
+      default:
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unsupported preferredOperation: " + action.toLower() + " specified for node lost trigger");
+    }
+  }
+
+  /**
+   * Applies collection hints for all collections that match the {@link #collectionsPredicate}
+   * and returns the number of collections that matched.
+   * @return number of collections that match the {@link #collectionsPredicate}
+   * @throws IOException if {@link org.apache.solr.client.solrj.impl.ClusterStateProvider} throws IOException
+   */
+  private int applyCollectionHints(SolrCloudManager cloudManager, Suggester s) throws IOException {
+    ClusterState clusterState = cloudManager.getClusterStateProvider().getClusterState();
+    Set<String> set = clusterState.getCollectionStates().keySet().stream()
+            .filter(collectionRef -> collectionsPredicate.test(collectionRef))
+            .collect(Collectors.toSet());
+    if (set.size() < clusterState.getCollectionStates().size())  {
+      // apply hints only if a subset of collections are selected
+      set.forEach(c -> s.hint(Suggester.Hint.COLL, c));
+    }
+    return set.size();
+  }
+
   private Suggester getNodeAddedSuggester(SolrCloudManager cloudManager, Policy.Session session, TriggerEvent event) throws IOException {
     String preferredOp = (String) event.getProperty(AutoScalingParams.PREFERRED_OP, CollectionParams.CollectionAction.MOVEREPLICA.toLower());
     Replica.Type replicaType = (Replica.Type) event.getProperty(AutoScalingParams.REPLICA_TYPE, Replica.Type.NRT);
@@ -283,17 +321,18 @@ public class ComputePlanAction extends TriggerActionBase {
       case ADDREPLICA:
         // add all collection/shard pairs and let policy engine figure out which one
         // to place on the target node
-        // todo in future we can prune ineligible collection/shard pairs
         ClusterState clusterState = cloudManager.getClusterStateProvider().getClusterState();
         Set<Pair<String, String>> collShards = new HashSet<>();
-        clusterState.getCollectionStates().forEach((collectionName, collectionRef) -> {
-          DocCollection docCollection = collectionRef.get();
-          if (docCollection != null)  {
-            docCollection.getActiveSlices().stream()
-                .map(slice -> new Pair<>(collectionName, slice.getName()))
-                .forEach(collShards::add);
-          }
-        });
+        clusterState.getCollectionStates().entrySet().stream()
+                .filter(e -> collectionsPredicate.test(e.getKey()))
+                .forEach(entry -> {
+                  DocCollection docCollection = entry.getValue().get();
+                  if (docCollection != null) {
+                    docCollection.getActiveSlices().stream()
+                            .map(slice -> new Pair<>(entry.getKey(), slice.getName()))
+                            .forEach(collShards::add);
+                  }
+                });
         suggester.hint(Suggester.Hint.COLL_SHARD, collShards);
         suggester.hint(Suggester.Hint.REPLICATYPE, replicaType);
         break;
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java
index b6e6d20..8b41f2f 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasPlanActionTest.java
@@ -84,6 +84,7 @@ public class AutoAddReplicasPlanActionTest extends SolrCloudTestCase{
 
     String collection1 = "testSimple1";
     String collection2 = "testSimple2";
+    String collection3 = "testSimple3";
     CollectionAdminRequest.createCollection(collection1, "conf", 2, 2)
         .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName())
         .setAutoAddReplicas(true)
@@ -94,8 +95,8 @@ public class AutoAddReplicasPlanActionTest extends SolrCloudTestCase{
         .setAutoAddReplicas(false)
         .setMaxShardsPerNode(1)
         .process(cluster.getSolrClient());
-    // the number of cores in jetty1 (5) will be larger than jetty3 (1)
-    CollectionAdminRequest.createCollection("testSimple3", "conf", 3, 1)
+    // the number of cores in jetty1 (6) will be larger than jetty3 (1)
+    CollectionAdminRequest.createCollection(collection3, "conf", 3, 1)
         .setCreateNodeSet(jetty1.getNodeName())
         .setAutoAddReplicas(false)
         .setMaxShardsPerNode(3)
@@ -103,7 +104,7 @@ public class AutoAddReplicasPlanActionTest extends SolrCloudTestCase{
     
     cluster.waitForActiveCollection(collection1, 2, 4);
     cluster.waitForActiveCollection(collection2, 1, 2);
-    cluster.waitForActiveCollection("testSimple3", 3, 3);
+    cluster.waitForActiveCollection(collection3, 3, 3);
     
     // we remove the implicit created trigger, so the replicas won't be moved
     String removeTriggerCommand = "{" +
@@ -139,7 +140,7 @@ public class AutoAddReplicasPlanActionTest extends SolrCloudTestCase{
     
     cluster.waitForActiveCollection(collection1, 2, 4);
     cluster.waitForActiveCollection(collection2, 1, 2);
-    cluster.waitForActiveCollection("testSimple3", 3, 3);
+    cluster.waitForActiveCollection(collection3, 3, 3);
     
     assertTrue("Timeout waiting for all live and active", ClusterStateUtil.waitForAllActiveAndLiveReplicas(cluster.getSolrClient().getZkStateReader(), 30000));
     
@@ -184,7 +185,7 @@ public class AutoAddReplicasPlanActionTest extends SolrCloudTestCase{
     
     cluster.waitForActiveCollection(collection1, 2, 4);
     cluster.waitForActiveCollection(collection2, 1, 2);
-    cluster.waitForActiveCollection("testSimple3", 3, 3);
+    cluster.waitForActiveCollection(collection3, 3, 3);
     
     assertTrue("Timeout waiting for all live and active", ClusterStateUtil.waitForAllActiveAndLiveReplicas(cluster.getSolrClient().getZkStateReader(), 30000));
 
@@ -211,6 +212,7 @@ public class AutoAddReplicasPlanActionTest extends SolrCloudTestCase{
   @SuppressForbidden(reason = "Needs currentTimeMillis to create unique id")
   private List<SolrRequest> getOperations(JettySolrRunner actionJetty, String lostNodeName) throws Exception {
     try (AutoAddReplicasPlanAction action = new AutoAddReplicasPlanAction()) {
+      action.configure(actionJetty.getCoreContainer().getResourceLoader(), actionJetty.getCoreContainer().getZkController().getSolrCloudManager(), new HashMap<>());
       TriggerEvent lostNode = new NodeLostTrigger.NodeLostEvent(TriggerEventType.NODELOST, ".auto_add_replicas", Collections.singletonList(System.currentTimeMillis()), Collections.singletonList(lostNodeName), CollectionParams.CollectionAction.MOVEREPLICA.toLower());
       ActionContext context = new ActionContext(actionJetty.getCoreContainer().getZkController().getSolrCloudManager(), null, new HashMap<>());
       action.process(lostNode, context);
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java
index 1f5c8e3..471a0a7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ComputePlanActionTest.java
@@ -17,19 +17,6 @@
 
 package org.apache.solr.cloud.autoscaling;
 
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicReference;
-
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.cloud.NodeStateProvider;
@@ -55,14 +42,18 @@ import org.apache.solr.common.util.Pair;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.util.LogLevel;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.*;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
+
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.MOVEREPLICA;
 
@@ -444,7 +435,33 @@ public class ComputePlanActionTest extends SolrCloudTestCase {
 
   @Test
   //2018-06-18 (commented) @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 09-Apr-2018
-  public void testSelectedCollections() throws Exception {
+  public void testSelectedCollectionsByName() throws Exception {
+    String collectionsFilter = "'testSelected1,testSelected2'";
+    testCollectionsPredicate(collectionsFilter, Collections.emptyMap());
+  }
+
+  @Test
+  //2018-06-18 (commented) @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 09-Apr-2018
+  public void testSelectedCollectionsByPolicy() throws Exception {
+    CloudSolrClient solrClient = cluster.getSolrClient();
+    String setSearchPolicyCommand = "{" +
+            " 'set-policy': {" +
+            "   'search': [" +
+            "      {'replica':'<5', 'shard': '#EACH', 'node': '#ANY'}," +
+            "    ]" +
+            "}}";
+    SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setSearchPolicyCommand);
+    NamedList<Object> response = solrClient.request(req);
+    assertEquals(response.get("result").toString(), "success");
+
+    String collectionsFilter = "{'policy': 'search'}";
+    Map<String, String> createCollectionParameters = new HashMap<>();
+    createCollectionParameters.put("testSelected1", "search");
+    createCollectionParameters.put("testSelected2", "search");
+    testCollectionsPredicate(collectionsFilter, createCollectionParameters);
+  }
+
+  private void testCollectionsPredicate(String collectionsFilter, Map<String, String> createCollectionParameters) throws Exception {
     if (log.isInfoEnabled()) {
       log.info("Found number of jetties: {}", cluster.getJettySolrRunners().size());
     }
@@ -457,28 +474,37 @@ public class ComputePlanActionTest extends SolrCloudTestCase {
 
     CloudSolrClient solrClient = cluster.getSolrClient();
     String setTriggerCommand = "{" +
-        "'set-trigger' : {" +
-        "'name' : 'node_lost_trigger'," +
-        "'event' : 'nodeLost'," +
-        "'waitFor' : '1s'," +
-        "'enabled' : true," +
-        "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction', 'collections' : 'testSelected1,testSelected2'}," +
-        "{'name':'test','class':'" + ComputePlanActionTest.AssertingTriggerAction.class.getName() + "'}]" +
-        "}}";
+            "'set-trigger' : {" +
+            "'name' : 'node_lost_trigger'," +
+            "'event' : 'nodeLost'," +
+            "'waitFor' : '1s'," +
+            "'enabled' : true," +
+            "'actions' : [{'name':'compute_plan', 'class' : 'solr.ComputePlanAction', 'collections' : " + collectionsFilter + "}," +
+            "{'name':'test','class':'" + ComputePlanActionTest.AssertingTriggerAction.class.getName() + "'}]" +
+            "}}";
     SolrRequest req = AutoScalingRequest.create(SolrRequest.METHOD.POST, setTriggerCommand);
     NamedList<Object> response = solrClient.request(req);
     assertEquals(response.get("result").toString(), "success");
 
     CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection("testSelected1",
         "conf", 2, 2);
+    if (createCollectionParameters.get("testSelected1") != null)  {
+      create.setPolicy(createCollectionParameters.get("testSelected1"));
+    }
     create.process(solrClient);
 
     create = CollectionAdminRequest.createCollection("testSelected2",
         "conf", 2, 2);
+    if (createCollectionParameters.get("testSelected2") != null)  {
+      create.setPolicy(createCollectionParameters.get("testSelected2"));
+    }
     create.process(solrClient);
 
     create = CollectionAdminRequest.createCollection("testSelected3",
         "conf", 2, 2);
+    if (createCollectionParameters.get("testSelected3") != null)  {
+      create.setPolicy(createCollectionParameters.get("testSelected3"));
+    }
     create.process(solrClient);
     
     cluster.waitForActiveCollection("testSelected1", 2, 4);
diff --git a/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc b/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc
index 3ad3772..099f992 100644
--- a/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc
+++ b/solr/solr-ref-guide/src/solrcloud-autoscaling-trigger-actions.adoc
@@ -29,12 +29,19 @@ commands which can re-balance the cluster in response to trigger events.
 The following parameters are configurable:
 
 `collections`::
-A comma-separated list of collection names. If this list is not empty then
-the computed operations will only calculate collection operations that affect
-listed collections and ignore any other collection operations for collections
-not listed here. Note that non-collection operations are not affected by this.
+A comma-separated list of collection names, or a selector on collection properties that can be used to filter collections for which the plan is computed.
 
-Example configuration:
+If a non-empty list or selector is specified, then only collection operations that affect
+matching collections are computed; operations for any other collections
+are ignored. Non-collection operations are not affected.
+
+A collection selector is of the form `collections: {key1: value1, key2: value2, ...}` where the key can be any collection property such as `name`, `policy`, `numShards` etc.
+Values must match exactly, and a collection is selected only if all of the specified properties match.
+
+A collection selector is particularly handy in a cluster where collections are added and removed frequently, for example to target
+only the collections that use a specific autoscaling policy.
+
+Example configurations:
 
 [source,json]
 ----
@@ -48,11 +55,11 @@ Example configuration:
    {
     "name" : "compute_plan",
     "class" : "solr.ComputePlanAction",
-    "collections" : "test1,test2",
+    "collections" : "test1,test2"
    },
    {
     "name" : "execute_plan",
-    "class" : "solr.ExecutePlanAction",
+    "class" : "solr.ExecutePlanAction"
    }
   ]
  }
@@ -63,6 +70,56 @@ In this example only collections `test1` and `test2` will be potentially
 replicated / moved to an added node, other collections will be ignored even
 if they cause policy violations.
 
+[source,json]
+----
+{
+ "set-trigger" : {
+  "name" : "node_added_trigger",
+  "event" : "nodeAdded",
+  "waitFor" : "1s",
+  "enabled" : true,
+  "actions" : [
+   {
+    "name" : "compute_plan",
+    "class" : "solr.ComputePlanAction",
+    "collections" : {"policy": "my_policy"}
+   },
+   {
+    "name" : "execute_plan",
+    "class" : "solr.ExecutePlanAction"
+   }
+  ]
+ }
+}
+----
+
+In this example only collections that use `my_policy` as their autoscaling policy will potentially be replicated / moved to an added node; other collections will be ignored even if they cause policy violations.
+
+[source,json]
+----
+{
+ "set-trigger" : {
+  "name" : "node_added_trigger",
+  "event" : "nodeAdded",
+  "waitFor" : "1s",
+  "enabled" : true,
+  "actions" : [
+   {
+    "name" : "compute_plan",
+    "class" : "solr.ComputePlanAction",
+    "collections" : {"policy": "my_policy", "numShards" :  "4"}
+   },
+   {
+    "name" : "execute_plan",
+    "class" : "solr.ExecutePlanAction"
+   }
+  ]
+ }
+}
+----
+
+In this example only collections that use `my_policy` as their autoscaling policy and have `numShards` equal to `4` will potentially be replicated / moved to an added node; other collections will be ignored even if they cause policy violations.
+
 == Execute Plan Action
 
 The `ExecutePlanAction` executes the Collection API commands emitted by the `ComputePlanAction` against
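
As an aside for readers trying out the new selector, the following is a minimal SolrJ sketch (not part of the patch) of creating a collection that a `{'policy': 'search'}` selector would match. The collection name `logs_search` and the pre-existing `solrClient` (a `CloudSolrClient` for the cluster) are assumptions; the `search` policy must already be defined.

[source,java]
----
import org.apache.solr.client.solrj.request.CollectionAdminRequest;

// Create a 2-shard, 2-replica collection bound to the "search" autoscaling policy.
// A ComputePlanAction configured with collections: {"policy": "search"} will then
// include this collection when computing move/add-replica operations.
CollectionAdminRequest.Create create =
    CollectionAdminRequest.createCollection("logs_search", "conf", 2, 2);
create.setPolicy("search");      // associate the collection with the policy
create.process(solrClient);      // solrClient is an existing CloudSolrClient
----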


[lucene-solr] 28/47: Add back-compat indices for 8.5.2

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 607cc5482b4a6c51d6b92779da6010a11226398d
Author: Mike Drob <md...@apple.com>
AuthorDate: Wed May 27 13:18:15 2020 -0500

    Add back-compat indices for 8.5.2
---
 .../apache/lucene/index/TestBackwardsCompatibility.java |   7 +++++--
 .../test/org/apache/lucene/index/index.8.5.2-cfs.zip    | Bin 0 -> 15897 bytes
 .../test/org/apache/lucene/index/index.8.5.2-nocfs.zip  | Bin 0 -> 15902 bytes
 .../src/test/org/apache/lucene/index/sorted.8.5.2.zip   | Bin 0 -> 80768 bytes
 4 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index 245cef1..f4a5841 100644
--- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -305,7 +305,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     "8.5.0-cfs",
     "8.5.0-nocfs",
     "8.5.1-cfs",
-    "8.5.1-nocfs"
+    "8.5.1-nocfs",
+    "8.5.2-cfs",
+    "8.5.2-nocfs"
   };
 
   public static String[] getOldNames() {
@@ -322,7 +324,8 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     "sorted.8.4.0",
     "sorted.8.4.1",
     "sorted.8.5.0",
-    "sorted.8.5.1"
+    "sorted.8.5.1",
+    "sorted.8.5.2"
   };
 
   public static String[] getOldSortedNames() {
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-cfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-cfs.zip
new file mode 100644
index 0000000..06ef027
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-cfs.zip differ
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-nocfs.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-nocfs.zip
new file mode 100644
index 0000000..dabe2d4
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/index.8.5.2-nocfs.zip differ
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/sorted.8.5.2.zip b/lucene/backward-codecs/src/test/org/apache/lucene/index/sorted.8.5.2.zip
new file mode 100644
index 0000000..738f1db
Binary files /dev/null and b/lucene/backward-codecs/src/test/org/apache/lucene/index/sorted.8.5.2.zip differ


[lucene-solr] 33/47: SOLR-11934: REVERT addition of collection to log message This reverts commit e4dc9e94

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit f833cb5fc280360eb6332e04272f96e12a6121a0
Author: David Smiley <ds...@apache.org>
AuthorDate: Thu May 28 11:31:11 2020 -0400

    SOLR-11934: REVERT addition of collection to log message
    This reverts commit e4dc9e94
---
 solr/core/src/java/org/apache/solr/core/SolrCore.java | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 559cacc..311e30c 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -2528,9 +2528,7 @@ public final class SolrCore implements SolrInfoBean, Closeable {
         newSearcher.register(); // register subitems (caches)
 
         if (log.isInfoEnabled()) {
-          log.info("{} Registered new searcher autowarm time: {} ms: Collection: '{}'"
-              , logid, newSearcher.getWarmupTime()
-              , newSearcher.getCore().getCoreDescriptor().getCollectionName());
+          log.info("{} Registered new searcher autowarm time: {} ms", logid, newSearcher.getWarmupTime());
         }
 
       } catch (Exception e) {


[lucene-solr] 13/47: SOLR-14504: ZkController LiveNodesListener has NullPointerException in startup race.

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit aec740dfefe5f21e5ed3f3abb902ef06050ea571
Author: Andrzej Bialecki <ab...@apache.org>
AuthorDate: Thu May 21 18:17:05 2020 +0200

    SOLR-14504: ZkController LiveNodesListener has NullPointerException in startup race.
---
 solr/CHANGES.txt                                           | 3 +++
 solr/core/src/java/org/apache/solr/cloud/ZkController.java | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 26e2cb3..4185e9b 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -192,6 +192,9 @@ Bug Fixes
 * SOLR-14477: Fix incorrect 'relatedness()' calculations in json.facet 'terms' when 'prefix' option is used
   (hossman)
 
+* SOLR-14504: ZkController LiveNodesListener has NullPointerException in startup race.
+  (Colvin Cowie via ab)
+
 Other Changes
 ---------------------
 * SOLR-14197: SolrResourceLoader: marked many methods as deprecated, and in some cases rerouted exiting logic to avoid
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index ecbb781..2cd376c 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -1016,7 +1016,7 @@ public class ZkController implements Closeable {
         log.warn("Unable to read autoscaling.json", e1);
       }
       if (createNodes) {
-        byte[] json = Utils.toJSON(Collections.singletonMap("timestamp", cloudManager.getTimeSource().getEpochTimeNs()));
+        byte[] json = Utils.toJSON(Collections.singletonMap("timestamp", getSolrCloudManager().getTimeSource().getEpochTimeNs()));
         for (String n : oldNodes) {
           String path = ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + n;
 


[lucene-solr] 06/47: Lucene-9371: Allow external access to RegExp's parsed structure (#1521)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 8f2fc5e190cf09722936ff936b65cc366248b35d
Author: markharwood <ma...@gmail.com>
AuthorDate: Tue May 19 17:38:00 2020 +0100

    Lucene-9371: Allow external access to RegExp's parsed structure (#1521)
    
    Made RegExp internal fields public final to allow external classes to render eg English explanations of pattern logic
---
 .../org/apache/lucene/util/automaton/RegExp.java   | 212 ++++++++++++---------
 1 file changed, 120 insertions(+), 92 deletions(-)
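
To illustrate what the newly public structure enables, here is a minimal sketch (not part of the commit) of an external class walking the parsed tree. The class name `RegExpDescriber`, the handled node kinds, and the example pattern are illustrative only.

[source,java]
----
import org.apache.lucene.util.automaton.RegExp;

public class RegExpDescriber {
  // Renders a rough English description of a few node kinds using the public
  // final fields (kind, exp1, exp2, s, c); other kinds fall back to the Kind name.
  public static String describe(RegExp re) {
    switch (re.kind) {
      case REGEXP_UNION:
        return "(" + describe(re.exp1) + " or " + describe(re.exp2) + ")";
      case REGEXP_CONCATENATION:
        return describe(re.exp1) + " followed by " + describe(re.exp2);
      case REGEXP_REPEAT:
        return "(" + describe(re.exp1) + ") repeated zero or more times";
      case REGEXP_CHAR:
        return "the character '" + new String(Character.toChars(re.c)) + "'";
      case REGEXP_STRING:
        return "the string \"" + re.s + "\"";
      default:
        return re.kind.name();
    }
  }

  public static void main(String[] args) {
    System.out.println(describe(new RegExp("ab(cd|ef)*")));
  }
}
----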

diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/RegExp.java b/lucene/core/src/java/org/apache/lucene/util/automaton/RegExp.java
index bb8fc26..0874cde 100644
--- a/lucene/core/src/java/org/apache/lucene/util/automaton/RegExp.java
+++ b/lucene/core/src/java/org/apache/lucene/util/automaton/RegExp.java
@@ -365,8 +365,43 @@ import java.util.Set;
  */
 public class RegExp {
   
-  enum Kind {
-    REGEXP_UNION, REGEXP_CONCATENATION, REGEXP_INTERSECTION, REGEXP_OPTIONAL, REGEXP_REPEAT, REGEXP_REPEAT_MIN, REGEXP_REPEAT_MINMAX, REGEXP_COMPLEMENT, REGEXP_CHAR, REGEXP_CHAR_RANGE, REGEXP_ANYCHAR, REGEXP_EMPTY, REGEXP_STRING, REGEXP_ANYSTRING, REGEXP_AUTOMATON, REGEXP_INTERVAL,
+  /**
+   * The type of expression represented by a RegExp node.
+   */
+  public enum Kind {
+    /** The union of two expressions */
+    REGEXP_UNION, 
+    /** A sequence of two expressions */
+    REGEXP_CONCATENATION,
+    /** The intersection of two expressions */
+    REGEXP_INTERSECTION,
+    /** An optional expression */
+    REGEXP_OPTIONAL,
+    /** An expression that repeats */
+    REGEXP_REPEAT,
+    /** An expression that repeats a minimum number of times*/
+    REGEXP_REPEAT_MIN,
+    /** An expression that repeats a minimum and maximum number of times*/
+    REGEXP_REPEAT_MINMAX,
+    /** The complement of an expression */
+    REGEXP_COMPLEMENT,
+    /** A Character */
+    REGEXP_CHAR,
+    /** A Character range*/
+    REGEXP_CHAR_RANGE,
+    /** Any Character allowed*/
+    REGEXP_ANYCHAR,
+    /** An empty expression*/
+    REGEXP_EMPTY,
+    /** A string expression*/
+    REGEXP_STRING,
+    /** Any string allowed */
+    REGEXP_ANYSTRING,
+    /** An Automaton expression*/
+    REGEXP_AUTOMATON,
+    /** An Interval expression */
+    REGEXP_INTERVAL,
+    /** An expression for a pre-defined class e.g. \w */
     REGEXP_PRE_CLASS
   }
   
@@ -411,21 +446,37 @@ public class RegExp {
    */
   public static final int NONE = 0x0000;
 
+  //Immutable parsed state
+  /**
+   * The type of expression
+   */
+  public final Kind kind;
+  /**
+   * Child expressions held by a container type expression
+   */
+  public final RegExp exp1, exp2;
+  /**
+   * String expression
+   */
+  public final String s;
+  /**
+   *  Character expression
+   */
+  public final int c;
+  /**
+   * Limits for repeatable type expressions
+   */
+  public final int min, max, digits;
+  /**
+   * Extents for range type expressions
+   */
+  public final int from, to;
+
+  // Parser variables
   private final String originalString;
-  Kind kind;
-  RegExp exp1, exp2;
-  String s;
-  int c;
-  int min, max, digits;
-  int from, to;
-  
   int flags;
   int pos;
-  
-  RegExp() {
-    this.originalString = null;
-  }
-  
+    
   /**
    * Constructs new <code>RegExp</code> from a string. Same as
    * <code>RegExp(s, ALL)</code>.
@@ -468,6 +519,37 @@ public class RegExp {
     from = e.from;
     to = e.to;
   }
+  
+  RegExp(Kind kind, RegExp exp1, RegExp exp2, String s, int c, int min, int max, int digits, int from, int to){    
+    this.originalString = null;
+    this.kind = kind;
+    this.flags = 0;
+    this.exp1 = exp1;
+    this.exp2 = exp2;
+    this.s = s;
+    this.c = c;
+    this.min = min;
+    this.max = max;
+    this.digits = digits;
+    this.from = from;
+    this.to = to;
+  }
+
+  // Simplified construction of container nodes
+  static RegExp newContainerNode(Kind kind, RegExp exp1, RegExp exp2) {
+    return new RegExp(kind, exp1, exp2, null, 0, 0, 0, 0, 0, 0);
+  }
+
+  // Simplified construction of repeating nodes
+  static RegExp newRepeatingNode(Kind kind, RegExp exp,  int min, int max) {
+    return new RegExp(kind, exp, null, null, 0, min, max, 0, 0, 0);
+  }  
+  
+  
+  // Simplified construction of leaf nodes
+  static RegExp newLeafNode(Kind kind, String s, int c, int min, int max, int digits, int from, int to) {
+    return new RegExp(kind, null, null, s, c, min, max, digits, from, to);
+  }  
 
   /**
    * Constructs new <code>Automaton</code> from this <code>RegExp</code>. Same
@@ -919,34 +1001,29 @@ public class RegExp {
   }
   
   static RegExp makeUnion(RegExp exp1, RegExp exp2) {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_UNION;
-    r.exp1 = exp1;
-    r.exp2 = exp2;
-    return r;
+    return newContainerNode(Kind.REGEXP_UNION, exp1, exp2);
   }
   
   static RegExp makeConcatenation(RegExp exp1, RegExp exp2) {
     if ((exp1.kind == Kind.REGEXP_CHAR || exp1.kind == Kind.REGEXP_STRING)
         && (exp2.kind == Kind.REGEXP_CHAR || exp2.kind == Kind.REGEXP_STRING)) return makeString(
         exp1, exp2);
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_CONCATENATION;
+    RegExp rexp1, rexp2;
     if (exp1.kind == Kind.REGEXP_CONCATENATION
         && (exp1.exp2.kind == Kind.REGEXP_CHAR || exp1.exp2.kind == Kind.REGEXP_STRING)
         && (exp2.kind == Kind.REGEXP_CHAR || exp2.kind == Kind.REGEXP_STRING)) {
-      r.exp1 = exp1.exp1;
-      r.exp2 = makeString(exp1.exp2, exp2);
+      rexp1 = exp1.exp1;
+      rexp2 = makeString(exp1.exp2, exp2);
     } else if ((exp1.kind == Kind.REGEXP_CHAR || exp1.kind == Kind.REGEXP_STRING)
         && exp2.kind == Kind.REGEXP_CONCATENATION
         && (exp2.exp1.kind == Kind.REGEXP_CHAR || exp2.exp1.kind == Kind.REGEXP_STRING)) {
-      r.exp1 = makeString(exp1, exp2.exp1);
-      r.exp2 = exp2.exp2;
+      rexp1 = makeString(exp1, exp2.exp1);
+      rexp2 = exp2.exp2;
     } else {
-      r.exp1 = exp1;
-      r.exp2 = exp2;
+      rexp1 = exp1;
+      rexp2 = exp2;
     }
-    return r;
+    return newContainerNode(Kind.REGEXP_CONCATENATION, rexp1, rexp2);
   }
   
   static private RegExp makeString(RegExp exp1, RegExp exp2) {
@@ -959,107 +1036,61 @@ public class RegExp {
   }
   
   static RegExp makeIntersection(RegExp exp1, RegExp exp2) {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_INTERSECTION;
-    r.exp1 = exp1;
-    r.exp2 = exp2;
-    return r;
+    return newContainerNode(Kind.REGEXP_INTERSECTION, exp1, exp2);
   }
   
   static RegExp makeOptional(RegExp exp) {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_OPTIONAL;
-    r.exp1 = exp;
-    return r;
+    return newContainerNode(Kind.REGEXP_OPTIONAL, exp, null);
   }
   
   static RegExp makeRepeat(RegExp exp) {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_REPEAT;
-    r.exp1 = exp;
-    return r;
+    return newContainerNode(Kind.REGEXP_REPEAT, exp, null);
   }
   
   static RegExp makeRepeat(RegExp exp, int min) {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_REPEAT_MIN;
-    r.exp1 = exp;
-    r.min = min;
-    return r;
+    return newRepeatingNode(Kind.REGEXP_REPEAT_MIN, exp, min, 0);
   }
   
   static RegExp makeRepeat(RegExp exp, int min, int max) {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_REPEAT_MINMAX;
-    r.exp1 = exp;
-    r.min = min;
-    r.max = max;
-    return r;
+    return newRepeatingNode(Kind.REGEXP_REPEAT_MINMAX, exp, min, max);
   }
   
   static RegExp makeComplement(RegExp exp) {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_COMPLEMENT;
-    r.exp1 = exp;
-    return r;
+    return newContainerNode(Kind.REGEXP_COMPLEMENT, exp, null);
   }
   
   static RegExp makeChar(int c) {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_CHAR;
-    r.c = c;
-    return r;
+    return newLeafNode(Kind.REGEXP_CHAR, null, c, 0, 0, 0, 0, 0);
   }
   
   static RegExp makeCharRange(int from, int to) {
     if (from > to) 
       throw new IllegalArgumentException("invalid range: from (" + from + ") cannot be > to (" + to + ")");
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_CHAR_RANGE;
-    r.from = from;
-    r.to = to;
-    return r;
+    return newLeafNode(Kind.REGEXP_CHAR_RANGE, null, 0, 0, 0, 0, from, to);
   }
   
   static RegExp makeAnyChar() {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_ANYCHAR;
-    return r;
+    return newContainerNode(Kind.REGEXP_ANYCHAR, null, null);
   }
   
   static RegExp makeEmpty() {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_EMPTY;
-    return r;
+    return newContainerNode(Kind.REGEXP_EMPTY, null, null);
   }
   
   static RegExp makeString(String s) {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_STRING;
-    r.s = s;
-    return r;
+    return newLeafNode(Kind.REGEXP_STRING, s, 0, 0, 0, 0, 0, 0);
   }
   
   static RegExp makeAnyString() {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_ANYSTRING;
-    return r;
+    return newContainerNode(Kind.REGEXP_ANYSTRING, null, null);
   }
   
   static RegExp makeAutomaton(String s) {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_AUTOMATON;
-    r.s = s;
-    return r;
+    return newLeafNode(Kind.REGEXP_AUTOMATON, s, 0, 0, 0, 0, 0, 0);
   }
   
   static RegExp makeInterval(int min, int max, int digits) {
-    RegExp r = new RegExp();
-    r.kind = Kind.REGEXP_INTERVAL;
-    r.min = min;
-    r.max = max;
-    r.digits = digits;
-    return r;
+  return newLeafNode(Kind.REGEXP_INTERVAL, null, 0, min, max, digits, 0, 0);
   }
   
   private boolean peek(String s) {
@@ -1201,10 +1232,7 @@ public class RegExp {
     //See https://docs.oracle.com/javase/tutorial/essential/regex/pre_char_classes.html
     if (match('\\')) {
       if (peek("dDwWsS")) {
-        RegExp re =new RegExp();
-        re.kind = Kind.REGEXP_PRE_CLASS;
-        re.from = next();
-        return re;
+        return newLeafNode(Kind.REGEXP_PRE_CLASS, null, 0, 0, 0, 0, next(), 0);
       }
       
       if (peek("\\")) {


[lucene-solr] 46/47: Revert "Revert "LUCENE-8962""

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 4e7c17e328edb7856f8007698c22ad233be9d6f2
Author: Michael Sokolov <so...@amazon.com>
AuthorDate: Mon Jun 1 14:45:00 2020 -0400

    Revert "Revert "LUCENE-8962""
    
    This reverts commit 4501b3d3fdbc35af99bde6abe7432cfc5e8b5547.
    
    This reverts commit 075adac59865b3277adcf86052f2fae3e6d11135.
---
 lucene/CHANGES.txt                                 |   2 +
 .../org/apache/lucene/index/FilterMergePolicy.java |   5 +
 .../java/org/apache/lucene/index/IndexWriter.java  | 114 ++++++++++++++++++++-
 .../org/apache/lucene/index/IndexWriterConfig.java |  29 ++++++
 .../org/apache/lucene/index/IndexWriterEvents.java |  57 +++++++++++
 .../apache/lucene/index/LiveIndexWriterConfig.java |  26 +++++
 .../java/org/apache/lucene/index/MergePolicy.java  |  28 ++++-
 .../java/org/apache/lucene/index/MergeTrigger.java |   7 +-
 .../org/apache/lucene/index/NoMergePolicy.java     |   3 +
 .../lucene/index/OneMergeWrappingMergePolicy.java  |   5 +
 .../lucene/index/TestIndexWriterMergePolicy.java   |  70 ++++++++++++-
 .../apache/lucene/index/MockRandomMergePolicy.java |  32 ++++++
 .../org/apache/lucene/util/LuceneTestCase.java     |   1 +
 13 files changed, 373 insertions(+), 6 deletions(-)

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 50b7f7b..6e63ad7 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -376,6 +376,8 @@ Improvements
 
 * LUCENE-9253: KoreanTokenizer now supports custom dictionaries(system, unknown). (Namgyu Kim)
 
+* LUCENE-8962: Add ability to selectively merge on commit (Michael Froh)
+
 * LUCENE-9171: QueryBuilder can now use BoostAttributes on input token streams to selectively
   boost particular terms or synonyms in parsed queries. (Alessandro Benedetti, Alan Woodward)
 
diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java
index eb634b4..b4e33f8 100644
--- a/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java
@@ -58,6 +58,11 @@ public class FilterMergePolicy extends MergePolicy {
   }
 
   @Override
+  public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException {
+    return in.findFullFlushMerges(mergeTrigger, segmentInfos, mergeContext);
+  }
+
+  @Override
   public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo, MergeContext mergeContext)
       throws IOException {
     return in.useCompoundFile(infos, mergedInfo, mergeContext);
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
index 52adbef..88fdb90 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
@@ -33,6 +33,8 @@ import java.util.Queue;
 import java.util.Set;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.Semaphore;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
@@ -3152,6 +3154,42 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable,
     }
   }
 
+  private MergePolicy.OneMerge updateSegmentInfosOnMergeFinish(MergePolicy.OneMerge merge, final SegmentInfos toCommit,
+                                                                AtomicReference<CountDownLatch> mergeLatchRef) {
+    return new MergePolicy.OneMerge(merge.segments) {
+      public void mergeFinished() throws IOException {
+        super.mergeFinished();
+        CountDownLatch mergeAwaitLatch = mergeLatchRef.get();
+        if (mergeAwaitLatch == null) {
+          // Commit thread timed out waiting for this merge and moved on. No need to manipulate toCommit.
+          return;
+        }
+        if (committed) {
+          deleter.incRef(this.info.files());
+          // Resolve "live" SegmentInfos segments to their toCommit cloned equivalents, based on segment name.
+          Set<String> mergedSegmentNames = new HashSet<>();
+          for (SegmentCommitInfo sci : this.segments) {
+            deleter.decRef(sci.files());
+            mergedSegmentNames.add(sci.info.name);
+          }
+          List<SegmentCommitInfo> toCommitMergedAwaySegments = new ArrayList<>();
+          for (SegmentCommitInfo sci : toCommit) {
+            if (mergedSegmentNames.contains(sci.info.name)) {
+              toCommitMergedAwaySegments.add(sci);
+            }
+          }
+          // Construct a OneMerge that applies to toCommit
+          MergePolicy.OneMerge applicableMerge = new MergePolicy.OneMerge(toCommitMergedAwaySegments);
+          applicableMerge.info = this.info.clone();
+          long segmentCounter = Long.parseLong(this.info.info.name.substring(1), Character.MAX_RADIX);
+          toCommit.counter = Math.max(toCommit.counter, segmentCounter + 1);
+          toCommit.applyMergeChanges(applicableMerge, false);
+        }
+        mergeAwaitLatch.countDown();
+      }
+    };
+  }
+
   private long prepareCommitInternal() throws IOException {
     startCommitTime = System.nanoTime();
     synchronized(commitLock) {
@@ -3174,6 +3212,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable,
       SegmentInfos toCommit = null;
       boolean anyChanges = false;
       long seqNo;
+      List<MergePolicy.OneMerge> commitMerges = null;
+      AtomicReference<CountDownLatch> mergeAwaitLatchRef = null;
 
       // This is copied from doFlush, except it's modified to
       // clone & incRef the flushed SegmentInfos inside the
@@ -3228,6 +3268,30 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable,
               // sneak into the commit point:
               toCommit = segmentInfos.clone();
 
+              if (anyChanges) {
+                // Find any merges that can execute on commit (per MergePolicy).
+                MergePolicy.MergeSpecification mergeSpec =
+                    config.getMergePolicy().findFullFlushMerges(MergeTrigger.COMMIT, segmentInfos, this);
+                if (mergeSpec != null && mergeSpec.merges.size() > 0) {
+                  int mergeCount = mergeSpec.merges.size();
+                  commitMerges = new ArrayList<>(mergeCount);
+                  mergeAwaitLatchRef = new AtomicReference<>(new CountDownLatch(mergeCount));
+                  for (MergePolicy.OneMerge oneMerge : mergeSpec.merges) {
+                    MergePolicy.OneMerge trackedMerge =
+                        updateSegmentInfosOnMergeFinish(oneMerge, toCommit, mergeAwaitLatchRef);
+                    if (registerMerge(trackedMerge) == false) {
+                      throw new IllegalStateException("MergePolicy " + config.getMergePolicy().getClass() +
+                          " returned merging segments from findFullFlushMerges");
+                    }
+                    commitMerges.add(trackedMerge);
+                  }
+                  if (infoStream.isEnabled("IW")) {
+                    infoStream.message("IW", "Registered " + mergeCount + " commit merges");
+                    infoStream.message("IW", "Before executing commit merges, had " + toCommit.size() + " segments");
+                  }
+                }
+              }
+
               pendingCommitChangeCount = changeCount.get();
 
               // This protects the segmentInfos we are now going
@@ -3235,8 +3299,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable,
               // we are trying to sync all referenced files, a
               // merge completes which would otherwise have
               // removed the files we are now syncing.    
-              filesToCommit = toCommit.files(false); 
-              deleter.incRef(filesToCommit);
+              deleter.incRef(toCommit.files(false));
             }
             success = true;
           } finally {
@@ -3257,6 +3320,52 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable,
       } finally {
         maybeCloseOnTragicEvent();
       }
+
+      if (mergeAwaitLatchRef != null) {
+        CountDownLatch mergeAwaitLatch = mergeAwaitLatchRef.get();
+        // If we found and registered any merges above, within the flushLock, then we want to ensure that they
+        // complete execution. Note that since we released the lock, other merges may have been scheduled. We will
+        // block until  the merges that we registered complete. As they complete, they will update toCommit to
+        // replace merged segments with the result of each merge.
+        config.getIndexWriterEvents().beginMergeOnCommit();
+        mergeScheduler.merge(mergeSource, MergeTrigger.COMMIT);
+        long mergeWaitStart = System.nanoTime();
+        int abandonedCount = 0;
+        long waitTimeMillis = (long) (config.getMaxCommitMergeWaitSeconds() * 1000.0);
+        try {
+          if (mergeAwaitLatch.await(waitTimeMillis, TimeUnit.MILLISECONDS) == false) {
+            synchronized (this) {
+              // Need to do this in a synchronized block, to make sure none of our commit merges are currently
+              // executing mergeFinished (since mergeFinished itself is called from within the IndexWriter lock).
+              // After we clear the value from mergeAwaitLatchRef, the merges we schedule will still execute as
+              // usual, but when they finish, they won't attempt to update toCommit or modify segment reference
+              // counts.
+              mergeAwaitLatchRef.set(null);
+              for (MergePolicy.OneMerge commitMerge : commitMerges) {
+                if (runningMerges.contains(commitMerge) || pendingMerges.contains(commitMerge)) {
+                  abandonedCount++;
+                }
+              }
+            }
+          }
+        } catch (InterruptedException ie) {
+          throw new ThreadInterruptedException(ie);
+        } finally {
+          if (infoStream.isEnabled("IW")) {
+            infoStream.message("IW", String.format(Locale.ROOT, "Waited %.1f ms for commit merges",
+                (System.nanoTime() - mergeWaitStart)/1_000_000.0));
+            infoStream.message("IW", "After executing commit merges, had " + toCommit.size() + " segments");
+            if (abandonedCount > 0) {
+              infoStream.message("IW", "Abandoned " + abandonedCount + " commit merges after " + waitTimeMillis + " ms");
+            }
+          }
+          if (abandonedCount > 0) {
+            config.getIndexWriterEvents().abandonedMergesOnCommit(abandonedCount);
+          }
+          config.getIndexWriterEvents().finishMergeOnCommit();
+        }
+      }
+      filesToCommit = toCommit.files(false);
      
       try {
         if (anyChanges) {
@@ -3962,6 +4071,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable,
     }
 
     try (Closeable finalizer = this::checkpoint) {
+      merge.committed = true;
       // Must close before checkpoint, otherwise IFD won't be
       // able to delete the held-open files from the merge
       // readers:
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
index 26e7e3d..629b1e8 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
@@ -19,6 +19,7 @@ package org.apache.lucene.index;
 
 import java.io.PrintStream;
 import java.util.Arrays;
+import java.util.EnumSet;
 import java.util.stream.Collectors;
 
 import org.apache.lucene.analysis.Analyzer;
@@ -109,6 +110,9 @@ public final class IndexWriterConfig extends LiveIndexWriterConfig {
   
   /** Default value for whether calls to {@link IndexWriter#close()} include a commit. */
   public final static boolean DEFAULT_COMMIT_ON_CLOSE = true;
+
+  /** Default value for time to wait for merges on commit (when using a {@link MergePolicy} that implements findFullFlushMerges). */
+  public static final double DEFAULT_MAX_COMMIT_MERGE_WAIT_SECONDS = 30.0;
   
   // indicates whether this config instance is already attached to a writer.
   // not final so that it can be cloned properly.
@@ -460,6 +464,31 @@ public final class IndexWriterConfig extends LiveIndexWriterConfig {
   }
 
   /**
+   * Expert: sets the amount of time to wait for merges returned by MergePolicy.findFullFlushMerges(...).
+   * If this time is reached, we proceed with the commit based on segments merged up to that point.
+   * The merges are not cancelled, and may still run to completion independent of the commit.
+   */
+  public IndexWriterConfig setMaxCommitMergeWaitSeconds(double maxCommitMergeWaitSeconds) {
+    this.maxCommitMergeWaitSeconds = maxCommitMergeWaitSeconds;
+    return this;
+  }
+
+  /**
+   * Set the callback that gets invoked when IndexWriter performs various actions.
+   */
+  public IndexWriterConfig setIndexWriterEvents(IndexWriterEvents indexWriterEvents) {
+    this.indexWriterEvents = indexWriterEvents;
+    return this;
+  }
+
+  /** We only allow sorting on these types */
+  private static final EnumSet<SortField.Type> ALLOWED_INDEX_SORT_TYPES = EnumSet.of(SortField.Type.STRING,
+                                                                                     SortField.Type.LONG,
+                                                                                     SortField.Type.INT,
+                                                                                     SortField.Type.DOUBLE,
+                                                                                     SortField.Type.FLOAT);
+
+  /**
    * Set the {@link Sort} order to use for all (flushed and merged) segments.
    */
   public IndexWriterConfig setIndexSort(Sort sort) {
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriterEvents.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriterEvents.java
new file mode 100644
index 0000000..d36fb25
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriterEvents.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.index;
+
+/**
+ * Callback interface to signal various actions taken by IndexWriter.
+ *
+ * @lucene.experimental
+ */
+public interface IndexWriterEvents {
+  /**
+   * A default implementation that ignores all events.
+   */
+  IndexWriterEvents NULL_EVENTS = new IndexWriterEvents() {
+    @Override
+    public void beginMergeOnCommit() { }
+
+    @Override
+    public void finishMergeOnCommit() { }
+
+    @Override
+    public void abandonedMergesOnCommit(int abandonedCount) { }
+  };
+
+  /**
+   * Signals the start of waiting for a merge on commit, returned from
+   * {@link MergePolicy#findFullFlushMerges(MergeTrigger, SegmentInfos, MergePolicy.MergeContext)}.
+   */
+  void beginMergeOnCommit();
+
+  /**
+   * Signals the end of waiting for merges on commit. This may be either because the merges completed, or because we timed out according
+   * to the limit set in {@link IndexWriterConfig#setMaxCommitMergeWaitSeconds(double)}.
+   */
+  void finishMergeOnCommit();
+
+  /**
+   * Called to signal that we abandoned some merges on commit upon reaching the timeout specified in
+   * {@link IndexWriterConfig#setMaxCommitMergeWaitSeconds(double)}.
+   */
+  void abandonedMergesOnCommit(int abandonedCount);
+}
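
For orientation only (not part of the patch), a minimal sketch of wiring the new hooks into an IndexWriterConfig follows. The class name, the listener body, and the 5-second timeout are assumptions for illustration.

[source,java]
----
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterEvents;

public class MergeOnCommitConfigFactory {
  // Builds a config that reports abandoned commit merges and caps how long
  // commit() waits for merges returned by MergePolicy.findFullFlushMerges(...).
  static IndexWriterConfig create(Analyzer analyzer) {
    IndexWriterEvents events = new IndexWriterEvents() {
      @Override public void beginMergeOnCommit() { }
      @Override public void finishMergeOnCommit() { }
      @Override public void abandonedMergesOnCommit(int abandonedCount) {
        System.out.println(abandonedCount + " commit merges did not finish in time");
      }
    };
    return new IndexWriterConfig(analyzer)
        .setIndexWriterEvents(events)          // observe merge-on-commit activity
        .setMaxCommitMergeWaitSeconds(5.0);    // wait at most ~5s for commit merges
  }
}
----

Note that a MergePolicy whose findFullFlushMerges returns a non-null specification (see the MergePolicy.java change below) is still required for commit() to actually trigger merges.
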
diff --git a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java
index 1f48acc..59a54c7 100644
--- a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java
+++ b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java
@@ -109,6 +109,12 @@ public class LiveIndexWriterConfig {
   /** soft deletes field */
   protected String softDeletesField = null;
 
+  /** Amount of time to wait for merges returned by MergePolicy.findFullFlushMerges(...) */
+  protected volatile double maxCommitMergeWaitSeconds;
+
+  /** Callback interface called on index writer actions. */
+  protected IndexWriterEvents indexWriterEvents;
+
 
   // used by IndexWriterConfig
   LiveIndexWriterConfig(Analyzer analyzer) {
@@ -132,6 +138,8 @@ public class LiveIndexWriterConfig {
     flushPolicy = new FlushByRamOrCountsPolicy();
     readerPooling = IndexWriterConfig.DEFAULT_READER_POOLING;
     perThreadHardLimitMB = IndexWriterConfig.DEFAULT_RAM_PER_THREAD_HARD_LIMIT_MB;
+    maxCommitMergeWaitSeconds = IndexWriterConfig.DEFAULT_MAX_COMMIT_MERGE_WAIT_SECONDS;
+    indexWriterEvents = IndexWriterEvents.NULL_EVENTS;
   }
   
   /** Returns the default analyzer to use for indexing documents. */
@@ -461,6 +469,22 @@ public class LiveIndexWriterConfig {
     return softDeletesField;
   }
 
+  /**
+   * Expert: return the amount of time to wait for merges returned by MergePolicy.findFullFlushMerges(...).
+   * If this time is reached, we proceed with the commit based on segments merged up to that point.
+   * The merges are not cancelled, and may still run to completion independent of the commit.
+   */
+  public double getMaxCommitMergeWaitSeconds() {
+    return maxCommitMergeWaitSeconds;
+  }
+
+  /**
+   * Returns a callback used to signal actions taken by the {@link IndexWriter}.
+   */
+  public IndexWriterEvents getIndexWriterEvents() {
+    return indexWriterEvents;
+  }
+
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
@@ -484,6 +508,8 @@ public class LiveIndexWriterConfig {
     sb.append("indexSort=").append(getIndexSort()).append("\n");
     sb.append("checkPendingFlushOnUpdate=").append(isCheckPendingFlushOnUpdate()).append("\n");
     sb.append("softDeletesField=").append(getSoftDeletesField()).append("\n");
+    sb.append("maxCommitMergeWaitSeconds=").append(getMaxCommitMergeWaitSeconds()).append("\n");
+    sb.append("indexWriterEvents=").append(getIndexWriterEvents().getClass().getName()).append("\n");
     return sb.toString();
   }
 }
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
index 3ac3914..13fb2db 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java
@@ -225,6 +225,8 @@ public abstract class MergePolicy {
     public final int totalMaxDoc;
     Throwable error;
 
+    boolean committed; // Set by IndexWriter once the merge has been committed to disk
+
     /** Sole constructor.
      * @param segments List of {@link SegmentCommitInfo}s
      *        to be merged. */
@@ -500,7 +502,7 @@ public abstract class MergePolicy {
  *          an original segment present in the
  *          to-be-merged index; else, it was a segment
  *          produced by a cascaded merge.
-   * @param mergeContext the IndexWriter to find the merges on
+   * @param mergeContext the MergeContext to find the merges on
    */
   public abstract MergeSpecification findForcedMerges(
       SegmentInfos segmentInfos, int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge, MergeContext mergeContext)
@@ -511,12 +513,34 @@ public abstract class MergePolicy {
    * deletes from the index.
    *  @param segmentInfos
    *          the total set of segments in the index
-   * @param mergeContext the IndexWriter to find the merges on
+   * @param mergeContext the MergeContext to find the merges on
    */
   public abstract MergeSpecification findForcedDeletesMerges(
       SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException;
 
   /**
+   * Identifies merges that we want to execute (synchronously) on commit. The default implementation returns null, meaning no merges are executed on commit.
+   *
+   * Any merges returned here will make {@link IndexWriter#commit()} or {@link IndexWriter#prepareCommit()} block until
+   * the merges complete or until {@link IndexWriterConfig#getMaxCommitMergeWaitSeconds()} have elapsed. This may be
+   * used to merge small segments that have just been flushed as part of the commit, reducing the number of segments in
+   * the commit. If a merge does not complete in the allotted time, it will continue to execute, but will not be reflected
+   * in the commit.
+   *
+   * If a {@link OneMerge} in the returned {@link MergeSpecification} includes a segment already included in a registered
+   * merge, then {@link IndexWriter#commit()} or {@link IndexWriter#prepareCommit()} will throw an {@link IllegalStateException}.
+   * Use {@link MergeContext#getMergingSegments()} to determine which segments are currently registered to merge.
+   *
+   * @param mergeTrigger the event that triggered the merge (COMMIT or FULL_FLUSH).
+   * @param segmentInfos the total set of segments in the index (while preparing the commit)
+   * @param mergeContext the MergeContext to find the merges on, which should be used to determine which segments are
+ *                     already in a registered merge (see {@link MergeContext#getMergingSegments()}).
+   *                     already in a registered merge (see {@link MergeContext#getMergingSegments()}).
+  public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException {
+    return null;
+  }
+
+  /**
    * Returns true if a new segment (regardless of its origin) should use the
    * compound file format. The default implementation returns <code>true</code>
    * iff the size of the given mergedInfo is less or equal to
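
As a quick reference for this new hook, here is a condensed sketch of the idea that the MERGE_ON_COMMIT_POLICY in TestIndexWriterMergePolicy (further down in this patch) implements in full: on COMMIT, fold every segment not already claimed by a registered merge into a single merge. The wrapper class name is a placeholder.

[source,java]
----
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.*;

public class CommitMergePolicies {
  static final MergePolicy MERGE_ALL_ON_COMMIT = new LogDocMergePolicy() {
    @Override
    public MergeSpecification findFullFlushMerges(MergeTrigger trigger,
                                                  SegmentInfos infos,
                                                  MergeContext ctx) {
      if (trigger != MergeTrigger.COMMIT || infos.size() <= 1) {
        return null;                        // only act at commit time
      }
      List<SegmentCommitInfo> candidates = new ArrayList<>();
      for (SegmentCommitInfo sci : infos) {
        if (ctx.getMergingSegments().contains(sci) == false) {
          candidates.add(sci);              // skip segments already being merged
        }
      }
      if (candidates.size() < 2) {
        return null;
      }
      MergeSpecification spec = new MergeSpecification();
      spec.add(new OneMerge(candidates));
      return spec;
    }
  };
}
----
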
diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java b/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java
index d165a27..01a6b15 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MergeTrigger.java
@@ -47,5 +47,10 @@ public enum MergeTrigger {
   /**
    * Merge was triggered by a closing IndexWriter.
    */
-  CLOSING
+  CLOSING,
+
+  /**
+   * Merge was triggered on commit.
+   */
+  COMMIT,
 }
diff --git a/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java
index 1480ce4..b209e8ae 100644
--- a/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java
@@ -46,6 +46,9 @@ public final class NoMergePolicy extends MergePolicy {
   public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, MergeContext mergeContext) { return null; }
 
   @Override
+  public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext) { return null; }
+
+  @Override
   public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment, MergeContext mergeContext) {
     return newSegment.info.getUseCompoundFile();
   }
diff --git a/lucene/core/src/java/org/apache/lucene/index/OneMergeWrappingMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/OneMergeWrappingMergePolicy.java
index d08711e..a5fd66a 100644
--- a/lucene/core/src/java/org/apache/lucene/index/OneMergeWrappingMergePolicy.java
+++ b/lucene/core/src/java/org/apache/lucene/index/OneMergeWrappingMergePolicy.java
@@ -59,6 +59,11 @@ public class OneMergeWrappingMergePolicy extends FilterMergePolicy {
     return wrapSpec(in.findForcedDeletesMerges(segmentInfos, mergeContext));
   }
 
+  @Override
+  public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException {
+    return wrapSpec(in.findFullFlushMerges(mergeTrigger, segmentInfos, mergeContext));
+  }
+
   private MergeSpecification wrapSpec(MergeSpecification spec) {
     MergeSpecification wrapped = spec == null ? null : new MergeSpecification();
     if (wrapped != null) {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java
index ce591a2..8a463ef 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java
@@ -18,17 +18,42 @@ package org.apache.lucene.index;
 
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.store.Directory;
 
 import org.apache.lucene.util.LuceneTestCase;
 
 public class TestIndexWriterMergePolicy extends LuceneTestCase {
-  
+
+  private static final MergePolicy MERGE_ON_COMMIT_POLICY = new LogDocMergePolicy() {
+    @Override
+    public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext) {
+      // Optimize down to a single segment on commit
+      if (mergeTrigger == MergeTrigger.COMMIT && segmentInfos.size() > 1) {
+        List<SegmentCommitInfo> nonMergingSegments = new ArrayList<>();
+        for (SegmentCommitInfo sci : segmentInfos) {
+          if (mergeContext.getMergingSegments().contains(sci) == false) {
+            nonMergingSegments.add(sci);
+          }
+        }
+        if (nonMergingSegments.size() > 1) {
+          MergeSpecification mergeSpecification = new MergeSpecification();
+          mergeSpecification.add(new OneMerge(nonMergingSegments));
+          return mergeSpecification;
+        }
+      }
+      return null;
+    }
+  };
+
   // Test the normal case
   public void testNormalCase() throws IOException {
     Directory dir = newDirectory();
@@ -278,6 +303,49 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     assertSetters(new LogDocMergePolicy());
   }
 
+  // Test basic semantics of merge on commit
+  public void testMergeOnCommit() throws IOException {
+    Directory dir = newDirectory();
+
+    IndexWriter firstWriter = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
+        .setMergePolicy(NoMergePolicy.INSTANCE));
+    for (int i = 0; i < 5; i++) {
+      TestIndexWriter.addDoc(firstWriter);
+      firstWriter.flush();
+    }
+    DirectoryReader firstReader = DirectoryReader.open(firstWriter);
+    assertEquals(5, firstReader.leaves().size());
+    firstReader.close();
+    firstWriter.close(); // When this writer closes, it does not merge on commit.
+
+    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()))
+        .setMergePolicy(MERGE_ON_COMMIT_POLICY);
+
+    IndexWriter writerWithMergePolicy = new IndexWriter(dir, iwc);
+    writerWithMergePolicy.commit(); // No changes. Commit doesn't trigger a merge.
+
+    DirectoryReader unmergedReader = DirectoryReader.open(writerWithMergePolicy);
+    assertEquals(5, unmergedReader.leaves().size());
+    unmergedReader.close();
+
+    TestIndexWriter.addDoc(writerWithMergePolicy);
+    writerWithMergePolicy.commit(); // Doc added, do merge on commit.
+    assertEquals(1, writerWithMergePolicy.getSegmentCount()); //
+
+    DirectoryReader mergedReader = DirectoryReader.open(writerWithMergePolicy);
+    assertEquals(1, mergedReader.leaves().size());
+    mergedReader.close();
+
+    try (IndexReader reader = writerWithMergePolicy.getReader()) {
+      IndexSearcher searcher = new IndexSearcher(reader);
+      assertEquals(6, reader.numDocs());
+      assertEquals(6, searcher.count(new MatchAllDocsQuery()));
+    }
+
+    writerWithMergePolicy.close();
+    dir.close();
+  }
+
   private void assertSetters(MergePolicy lmp) {
     lmp.setMaxCFSSegmentSizeMB(2.0);
     assertEquals(2.0, lmp.getMaxCFSSegmentSizeMB(), EPSILON);
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
index beb4dad..92ffc73 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/MockRandomMergePolicy.java
@@ -129,6 +129,38 @@ public class MockRandomMergePolicy extends MergePolicy {
   }
 
   @Override
+  public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException {
+    MergeSpecification mergeSpecification = findMerges(null, segmentInfos, mergeContext);
+    if (mergeSpecification == null) {
+      return null;
+    }
+    // Do not return any merges involving already-merging segments.
+    MergeSpecification filteredMergeSpecification = new MergeSpecification();
+    for (OneMerge oneMerge : mergeSpecification.merges) {
+      boolean filtered = false;
+      List<SegmentCommitInfo> nonMergingSegments = new ArrayList<>();
+      for (SegmentCommitInfo sci : oneMerge.segments) {
+        if (mergeContext.getMergingSegments().contains(sci) == false) {
+          nonMergingSegments.add(sci);
+        } else {
+          filtered = true;
+        }
+      }
+      if (filtered == true) {
+        if (nonMergingSegments.size() > 0) {
+          filteredMergeSpecification.add(new OneMerge(nonMergingSegments));
+        }
+      } else {
+        filteredMergeSpecification.add(oneMerge);
+      }
+    }
+    if (filteredMergeSpecification.merges.size() > 0) {
+      return filteredMergeSpecification;
+    }
+    return null;
+  }
+
+  @Override
   public boolean useCompoundFile(SegmentInfos infos, SegmentCommitInfo mergedInfo, MergeContext mergeContext) throws IOException {
     // 80% of the time we create CFS:
     return random.nextInt(5) != 1;
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
index 9f2cd27..cc779a0 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
@@ -1003,6 +1003,7 @@ public abstract class LuceneTestCase extends Assert {
     if (rarely(r)) {
       c.setCheckPendingFlushUpdate(false);
     }
+    c.setMaxCommitMergeWaitSeconds(atLeast(r, 1));
     return c;
   }
 


[lucene-solr] 26/47: LUCENE-9380: Fix auxiliary class warnings in Lucene

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 3fe52dd60dd5a328f9899f907093562cd0c6823f
Author: Erick Erickson <Er...@gmail.com>
AuthorDate: Wed May 27 12:36:39 2020 -0400

    LUCENE-9380: Fix auxiliary class warnings in Lucene
---
 lucene/CHANGES.txt                                 |   2 +
 .../function/valuesource/DocFreqValueSource.java   | 175 ++++++++++-----------
 .../function/valuesource/IDFValueSource.java       |   2 +-
 .../function/valuesource/MaxDocValueSource.java    |   2 +-
 .../function/valuesource/NumDocsValueSource.java   |   2 +-
 5 files changed, 92 insertions(+), 91 deletions(-)

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 078e8b5..c8b382f 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -292,6 +292,8 @@ Build
 * LUCENE-9376: Fix or suppress 20 resource leak precommit warnings in lucene/search
   (Andras Salamon via Erick Erickson)
 
+* LUCENE-9380: Fix auxiliary class warnings in Lucene (Erick Erickson)
+
 ======================= Lucene 8.5.1 =======================
 
 Bug Fixes
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DocFreqValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DocFreqValueSource.java
index e03e316..bb67627 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DocFreqValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/DocFreqValueSource.java
@@ -29,94 +29,6 @@ import java.io.IOException;
 import java.util.Map;
 
 
-class ConstIntDocValues extends IntDocValues {
-  final int ival;
-  final float fval;
-  final double dval;
-  final long lval;
-  final String sval;
-  final ValueSource parent;
-
-  ConstIntDocValues(int val, ValueSource parent) {
-    super(parent);
-    ival = val;
-    fval = val;
-    dval = val;
-    lval = val;
-    sval = Integer.toString(val);
-    this.parent = parent;
-  }
-
-  @Override
-  public float floatVal(int doc) {
-    return fval;
-  }
-  @Override
-  public int intVal(int doc) {
-    return ival;
-  }
-  @Override
-  public long longVal(int doc) {
-    return lval;
-  }
-  @Override
-  public double doubleVal(int doc) {
-    return dval;
-  }
-  @Override
-  public String strVal(int doc) {
-    return sval;
-  }
-  @Override
-  public String toString(int doc) {
-    return parent.description() + '=' + sval;
-  }
-}
-
-class ConstDoubleDocValues extends DoubleDocValues {
-  final int ival;
-  final float fval;
-  final double dval;
-  final long lval;
-  final String sval;
-  final ValueSource parent;
-
-  ConstDoubleDocValues(double val, ValueSource parent) {
-    super(parent);
-    ival = (int)val;
-    fval = (float)val;
-    dval = val;
-    lval = (long)val;
-    sval = Double.toString(val);
-    this.parent = parent;
-  }
-
-  @Override
-  public float floatVal(int doc) {
-    return fval;
-  }
-  @Override
-  public int intVal(int doc) {
-    return ival;
-  }
-  @Override
-  public long longVal(int doc) {
-    return lval;
-  }
-  @Override
-  public double doubleVal(int doc) {
-    return dval;
-  }
-  @Override
-  public String strVal(int doc) {
-    return sval;
-  }
-  @Override
-  public String toString(int doc) {
-    return parent.description() + '=' + sval;
-  }
-}
-
 
 /**
  * <code>DocFreqValueSource</code> returns the number of documents containing the term.
@@ -167,5 +79,92 @@ public class DocFreqValueSource extends ValueSource {
     DocFreqValueSource other = (DocFreqValueSource)o;
     return this.indexedField.equals(other.indexedField) && this.indexedBytes.equals(other.indexedBytes);
   }
+  static class ConstIntDocValues extends IntDocValues {
+    final int ival;
+    final float fval;
+    final double dval;
+    final long lval;
+    final String sval;
+    final ValueSource parent;
+
+    ConstIntDocValues(int val, ValueSource parent) {
+      super(parent);
+      ival = val;
+      fval = val;
+      dval = val;
+      lval = val;
+      sval = Integer.toString(val);
+      this.parent = parent;
+    }
+
+    @Override
+    public float floatVal(int doc) {
+      return fval;
+    }
+    @Override
+    public int intVal(int doc) {
+      return ival;
+    }
+    @Override
+    public long longVal(int doc) {
+      return lval;
+    }
+    @Override
+    public double doubleVal(int doc) {
+      return dval;
+    }
+    @Override
+    public String strVal(int doc) {
+      return sval;
+    }
+    @Override
+    public String toString(int doc) {
+      return parent.description() + '=' + sval;
+    }
+  }
+
+  static class ConstDoubleDocValues extends DoubleDocValues {
+    final int ival;
+    final float fval;
+    final double dval;
+    final long lval;
+    final String sval;
+    final ValueSource parent;
+
+    ConstDoubleDocValues(double val, ValueSource parent) {
+      super(parent);
+      ival = (int)val;
+      fval = (float)val;
+      dval = val;
+      lval = (long)val;
+      sval = Double.toString(val);
+      this.parent = parent;
+    }
+
+    @Override
+    public float floatVal(int doc) {
+      return fval;
+    }
+    @Override
+    public int intVal(int doc) {
+      return ival;
+    }
+    @Override
+    public long longVal(int doc) {
+      return lval;
+    }
+    @Override
+    public double doubleVal(int doc) {
+      return dval;
+    }
+    @Override
+    public String strVal(int doc) {
+      return sval;
+    }
+    @Override
+    public String toString(int doc) {
+      return parent.description() + '=' + sval;
+    }
+  }
 }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IDFValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IDFValueSource.java
index 4192f2d..7d2afb1 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IDFValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/IDFValueSource.java
@@ -53,7 +53,7 @@ public class IDFValueSource extends DocFreqValueSource {
     }
     int docfreq = searcher.getIndexReader().docFreq(new Term(indexedField, indexedBytes));
     float idf = sim.idf(docfreq, searcher.getIndexReader().maxDoc());
-    return new ConstDoubleDocValues(idf, this);
+    return new DocFreqValueSource.ConstDoubleDocValues(idf, this);
   }
   
   // tries extra hard to cast the sim to TFIDFSimilarity
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MaxDocValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MaxDocValueSource.java
index ccd7326..87eeddc 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MaxDocValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/MaxDocValueSource.java
@@ -48,7 +48,7 @@ public class MaxDocValueSource extends ValueSource {
   @Override
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     IndexSearcher searcher = (IndexSearcher)context.get("searcher");
-    return new ConstIntDocValues(searcher.getIndexReader().maxDoc(), this);
+    return new DocFreqValueSource.ConstIntDocValues(searcher.getIndexReader().maxDoc(), this);
   }
 
   @Override
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NumDocsValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NumDocsValueSource.java
index 6f92f1e..21e881d 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NumDocsValueSource.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/NumDocsValueSource.java
@@ -43,7 +43,7 @@ public class NumDocsValueSource extends ValueSource {
   @Override
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     // Searcher has no numdocs so we must use the reader instead
-    return new ConstIntDocValues(ReaderUtil.getTopLevelContext(readerContext).reader().numDocs(), this);
+    return new DocFreqValueSource.ConstIntDocValues(ReaderUtil.getTopLevelContext(readerContext).reader().numDocs(), this);
   }
 
   @Override
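
A minimal sketch (hypothetical class name ExampleConstValueSource, not part of the patch) of what a caller looks like after this refactoring: the constant-per-document FunctionValues implementations now live as static nested classes inside DocFreqValueSource, so sibling value sources in the same package qualify them with the enclosing class name, exactly as the IDFValueSource, MaxDocValueSource and NumDocsValueSource hunks above do.

package org.apache.lucene.queries.function.valuesource;

import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.IndexSearcher;

public class ExampleConstValueSource extends ValueSource {
  @Override
  public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
    IndexSearcher searcher = (IndexSearcher) context.get("searcher");
    // maxDoc is constant for the whole request, so the nested constant implementation suffices.
    return new DocFreqValueSource.ConstIntDocValues(searcher.getIndexReader().maxDoc(), this);
  }

  @Override
  public boolean equals(Object o) {
    return o != null && getClass() == o.getClass();
  }

  @Override
  public int hashCode() {
    return getClass().hashCode();
  }

  @Override
  public String description() {
    return "exampleConst()";
  }
}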


[lucene-solr] 29/47: SOLR-14498: BlockCache gets stuck not accepting new stores. Fix gradle :solr:core:validateJarChecksums

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit abd1f6a31c51f419a91f4675c7472516658b601e
Author: Erick Erickson <Er...@gmail.com>
AuthorDate: Wed May 27 20:24:43 2020 -0400

    SOLR-14498: BlockCache gets stuck not accepting new stores. Fix gradle :solr:core:validateJarChecksums
---
 solr/licenses/caffeine-2.8.0.jar.sha1 | 1 +
 solr/licenses/caffeine-2.8.4.jar.sha1 | 1 -
 2 files changed, 1 insertion(+), 1 deletion(-)

diff --git a/solr/licenses/caffeine-2.8.0.jar.sha1 b/solr/licenses/caffeine-2.8.0.jar.sha1
new file mode 100644
index 0000000..ce291c4
--- /dev/null
+++ b/solr/licenses/caffeine-2.8.0.jar.sha1
@@ -0,0 +1 @@
+6000774d7f8412ced005a704188ced78beeed2bb
diff --git a/solr/licenses/caffeine-2.8.4.jar.sha1 b/solr/licenses/caffeine-2.8.4.jar.sha1
deleted file mode 100644
index 813e00d..0000000
--- a/solr/licenses/caffeine-2.8.4.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e5730b11981406faa28e0912405a0ce7c2d0f377
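
For reference, each one-line file under solr/licenses/ stores the lower-case hex SHA-1 digest of the matching jar, and the checksum validation fails when the jar on the classpath and the recorded digest disagree, which is what swapping the recorded digest back to caffeine-2.8.0 repairs here. A small, self-contained sketch of computing such a digest (the jar path is only an example):

import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.MessageDigest;

public class JarSha1 {
  public static void main(String[] args) throws Exception {
    // Example path only; point this at the jar whose digest should be recorded.
    String jar = args.length > 0 ? args[0] : "caffeine-2.8.0.jar";
    byte[] digest = MessageDigest.getInstance("SHA-1").digest(Files.readAllBytes(Paths.get(jar)));
    StringBuilder hex = new StringBuilder();
    for (byte b : digest) {
      hex.append(String.format("%02x", b)); // lower-case hex, the format used by the *.sha1 files
    }
    System.out.println(hex);
  }
}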


[lucene-solr] 02/47: SOLR-14472: Autoscale "cores": use metrics to count Also counts all cores (lazy, transient), although currently impossible to use these in SolrCloud.

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 648e0683bb41cf7221bfb6c4f0f33586eab74109
Author: David Smiley <ds...@salesforce.com>
AuthorDate: Mon May 11 17:05:53 2020 -0400

    SOLR-14472: Autoscale "cores": use metrics to count
    Also counts all cores (lazy, transient), although currently impossible to use these in SolrCloud.
---
 .../solrj/impl/SolrClientNodeStateProvider.java    | 23 +++++++++++-----------
 1 file changed, 12 insertions(+), 11 deletions(-)

diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java
index 4f63525..796207a 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java
@@ -66,7 +66,7 @@ import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.TOTAL
 import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.WITH_COLLECTION;
 
 /**
- *
+ * The <em>real</em> {@link NodeStateProvider}, which communicates with Solr via SolrJ.
  */
 public class SolrClientNodeStateProvider implements NodeStateProvider, MapWriter {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -254,8 +254,8 @@ public class SolrClientNodeStateProvider implements NodeStateProvider, MapWriter
         prefixes.add("CONTAINER.fs.totalSpace");
       }
       if (requestedTags.contains(CORES)) {
-        groups.add("solr.core");
-        prefixes.add("CORE.coreName");
+        groups.add("solr.node");
+        prefixes.add("CONTAINER.cores");
       }
       if (requestedTags.contains(SYSLOADAVG)) {
         groups.add("solr.jvm");
@@ -273,30 +273,31 @@ public class SolrClientNodeStateProvider implements NodeStateProvider, MapWriter
 
       try {
         SimpleSolrResponse rsp = snitchContext.invokeWithRetry(solrNode, CommonParams.METRICS_PATH, params);
+        NamedList<?> metrics = (NamedList<?>) rsp.nl.get("metrics");
 
-        Map m = rsp.nl.asMap(4);
         if (requestedTags.contains(FREEDISK.tagName)) {
-          Object n = Utils.getObjectByPath(m, true, "metrics/solr.node/CONTAINER.fs.usableSpace");
+          Object n = Utils.getObjectByPath(metrics, true, "solr.node/CONTAINER.fs.usableSpace");
           if (n != null) ctx.getTags().put(FREEDISK.tagName, FREEDISK.convertVal(n));
         }
         if (requestedTags.contains(TOTALDISK.tagName)) {
-          Object n = Utils.getObjectByPath(m, true, "metrics/solr.node/CONTAINER.fs.totalSpace");
+          Object n = Utils.getObjectByPath(metrics, true, "solr.node/CONTAINER.fs.totalSpace");
           if (n != null) ctx.getTags().put(TOTALDISK.tagName, TOTALDISK.convertVal(n));
         }
         if (requestedTags.contains(CORES)) {
+          NamedList<?> node = (NamedList<?>) metrics.get("solr.node");
           int count = 0;
-          Map cores = (Map) m.get("metrics");
-          for (Object o : cores.keySet()) {
-            if (o.toString().startsWith("solr.core.")) count++;
+          for (String leafCoreMetricName : new String[]{"lazy", "loaded", "unloaded"}) {
+            Number n = (Number) node.get("CONTAINER.cores." + leafCoreMetricName);
+            if (n != null) count += n.intValue();
           }
           ctx.getTags().put(CORES, count);
         }
         if (requestedTags.contains(SYSLOADAVG)) {
-          Number n = (Number) Utils.getObjectByPath(m, true, "metrics/solr.jvm/os.systemLoadAverage");
+          Number n = (Number) Utils.getObjectByPath(metrics, true, "solr.jvm/os.systemLoadAverage");
           if (n != null) ctx.getTags().put(SYSLOADAVG, n.doubleValue() * 100.0d);
         }
         if (requestedTags.contains(HEAPUSAGE)) {
-          Number n = (Number) Utils.getObjectByPath(m, true, "metrics/solr.jvm/memory.heap.usage");
+          Number n = (Number) Utils.getObjectByPath(metrics, true, "solr.jvm/memory.heap.usage");
           if (n != null) ctx.getTags().put(HEAPUSAGE, n.doubleValue() * 100.0d);
         }
       } catch (Exception e) {
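
Condensed, the change above stops scanning for solr.core.* registries and instead sums the node-level CONTAINER.cores gauges. A sketch of an equivalent standalone lookup (it assumes an existing SolrClient pointed at the node and the standard group/prefix parameters of the metrics API; imports and error handling omitted):

// Roughly what the provider now asks for: /admin/metrics?group=solr.node&prefix=CONTAINER.cores
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("group", "solr.node");
params.set("prefix", "CONTAINER.cores");
NamedList<Object> rsp = client.request(
    new GenericSolrRequest(SolrRequest.METHOD.GET, CommonParams.METRICS_PATH, params));

NamedList<?> node = (NamedList<?>) ((NamedList<?>) rsp.get("metrics")).get("solr.node");
long cores = 0;
for (String leaf : new String[] {"lazy", "loaded", "unloaded"}) {
  Number n = (Number) node.get("CONTAINER.cores." + leaf);
  if (n != null) {
    cores += n.longValue(); // lazy and unloaded (transient) cores are counted as well
  }
}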


[lucene-solr] 23/47: SOLR-14280: SolrConfig error handling improvements

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit aa787586968d6bedeab7927f9f9df217f5e2046a
Author: Jason Gerlowski <ja...@lucidworks.com>
AuthorDate: Tue May 26 08:13:54 2020 -0400

    SOLR-14280: SolrConfig error handling improvements
---
 solr/CHANGES.txt                                        | 2 ++
 solr/core/src/java/org/apache/solr/core/SolrConfig.java | 6 +++---
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index a5cf874..0e09e49 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -261,6 +261,8 @@ Other Changes
 
 * SOLR-14495: Fix or suppress warnings in solr/search/function (Erick Erickson)
 
+* SOLR-14280: Improve error reporting in SolrConfig (Andras Salamon via Jason Gerlowski)
+
 ==================  8.5.1 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
index 0414260..68179ef 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
@@ -755,7 +755,7 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
       try {
         urls.addAll(SolrResourceLoader.getURLs(libPath));
       } catch (IOException e) {
-        log.warn("Couldn't add files from {} to classpath: {}", libPath, e.getMessage());
+        log.warn("Couldn't add files from {} to classpath: {}", libPath, e);
       }
     }
 
@@ -781,14 +781,14 @@ public class SolrConfig extends XmlConfigFile implements MapSerializable {
           else
             urls.addAll(SolrResourceLoader.getFilteredURLs(dir, regex));
         } catch (IOException e) {
-          log.warn("Couldn't add files from {} filtered by {} to classpath: {}", dir, regex, e.getMessage());
+          log.warn("Couldn't add files from {} filtered by {} to classpath: {}", dir, regex, e);
         }
       } else if (null != path) {
         final Path dir = instancePath.resolve(path);
         try {
           urls.add(dir.toUri().toURL());
         } catch (MalformedURLException e) {
-          log.warn("Couldn't add file {} to classpath: {}", dir, e.getMessage());
+          log.warn("Couldn't add file {} to classpath: {}", dir, e);
         }
       } else {
         throw new RuntimeException("lib: missing mandatory attributes: 'dir' or 'path'");
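
The warnings above now pass the exception itself instead of e.getMessage(). How much detail SLF4J emits depends on whether the throwable is consumed by a placeholder; a short sketch of the three variants (log being any org.slf4j.Logger):

// 1) Message only: drops the exception class and any cause chain.
log.warn("Couldn't add files from {} to classpath: {}", libPath, e.getMessage());

// 2) As in the patch: the trailing {} consumes the exception, so its toString()
//    (class name + message) appears inline, but no stack trace is logged.
log.warn("Couldn't add files from {} to classpath: {}", libPath, e);

// 3) No placeholder left for the throwable: SLF4J treats the final argument as a
//    Throwable and logs the full stack trace.
log.warn("Couldn't add files from {} to classpath", libPath, e);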


[lucene-solr] 05/47: SOLR-14476: Fix precommit

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit d68673da9850a1053916dfd1f77f1f2fc9f1586b
Author: Joel Bernstein <jb...@apache.org>
AuthorDate: Mon May 18 20:38:06 2020 -0400

    SOLR-14476: Fix precommit
---
 .../src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java    | 2 --
 1 file changed, 2 deletions(-)

diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java
index 8747565..0fd7246 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/StatsStream.java
@@ -303,7 +303,6 @@ public class StatsStream extends TupleStream implements Expressible  {
         ++metricCount;
       }
     }
-    //buf.append("}");
   }
 
   private void getTuples(NamedList response,
@@ -311,7 +310,6 @@ public class StatsStream extends TupleStream implements Expressible  {
 
     this.tuple = new Tuple(new HashMap());
     NamedList facets = (NamedList)response.get("facets");
-    System.out.println("###### Facets:"+facets);
     fillTuple(tuple, facets, metrics);
   }
 


[lucene-solr] 44/47: LUCENE-9301: include build time and user name only in non-snapshot builds so that jars are not recompiled on each build in development.

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit b20ceb26bb47b2b0430e0ec82914d4fd0b261b31
Author: Dawid Weiss <dw...@apache.org>
AuthorDate: Mon Jun 1 10:00:46 2020 +0200

    LUCENE-9301: include build time and user name only in non-snapshot builds so that jars are not recompiled on each build in development.
---
 build.gradle               | 2 ++
 gradle/jar-manifest.gradle | 9 ++++++++-
 2 files changed, 10 insertions(+), 1 deletion(-)

diff --git a/build.gradle b/build.gradle
index 2e80ed8..3a08ec4 100644
--- a/build.gradle
+++ b/build.gradle
@@ -54,6 +54,8 @@ ext {
     }
     return m[0][1] as int
   }
+  // snapshot build marker used in scripts.
+  snapshotBuild = version.contains("SNAPSHOT")
 
   // Build timestamp.
   def tstamp = ZonedDateTime.now()
diff --git a/gradle/jar-manifest.gradle b/gradle/jar-manifest.gradle
index 69b4116..9b2cd68 100644
--- a/gradle/jar-manifest.gradle
+++ b/gradle/jar-manifest.gradle
@@ -48,7 +48,14 @@ allprojects {
                     // awkward on import and resolves provider properties even though task dependencies
                     // have not been run yet?
                     def gitRev = rootProject.hasProperty("gitRev") ? rootProject.gitRev : ""
-                    "${project.version} ${gitRev} - ${System.properties['user.name']} - ${buildDate} ${buildTime}"
+
+                    // For snapshot builds just include the project version and gitRev so that
+                    // JARs don't need to be recompiled just because the manifest has changed.
+                    if (snapshotBuild) {
+                      return "${project.version} ${gitRev} [snapshot build, details omitted]"
+                    } else {
+                      return "${project.version} ${gitRev} - ${System.properties['user.name']} - ${buildDate} ${buildTime}"
+                    }
                 }
 
                 manifest {


[lucene-solr] 38/47: Revert "LUCENE-9359: Always call checkFooter in SegmentInfos#readCommit. (#1483)"

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 8cfa6a0963287e23b3654d1aa451291e8b216d3a
Author: Adrien Grand <jp...@gmail.com>
AuthorDate: Fri May 29 15:45:51 2020 +0200

    Revert "LUCENE-9359: Always call checkFooter in SegmentInfos#readCommit. (#1483)"
    
    This reverts commit bfb6bf9c9aafc778a88000e87f082d82dba9872c.
---
 lucene/CHANGES.txt                                 |   3 -
 .../java/org/apache/lucene/index/SegmentInfos.java | 231 ++++++++++-----------
 .../org/apache/lucene/index/TestSegmentInfos.java  |  61 ------
 3 files changed, 113 insertions(+), 182 deletions(-)

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 50b7f7b..cd42f6e 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -195,9 +195,6 @@ Improvements
 * LUCENE-9342: TotalHits' relation will be EQUAL_TO when the number of hits is lower than TopDocsColector's numHits
   (Tomás Fernández Löbbe)
 
-* LUCENE-9359: SegmentInfos#readCommit now always returns a
-  CorruptIndexException if the content of the file is invalid. (Adrien Grand)
-
 Optimizations
 ---------------------
 
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java b/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
index 5475fbd..f9edccd 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
@@ -304,141 +304,136 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
 
   /** Read the commit from the provided {@link ChecksumIndexInput}. */
   public static final SegmentInfos readCommit(Directory directory, ChecksumIndexInput input, long generation) throws IOException {
-    Throwable priorE = null;
-    try {
-      // NOTE: as long as we want to throw indexformattooold (vs corruptindexexception), we need
-      // to read the magic ourselves.
-      int magic = input.readInt();
-      if (magic != CodecUtil.CODEC_MAGIC) {
-        throw new IndexFormatTooOldException(input, magic, CodecUtil.CODEC_MAGIC, CodecUtil.CODEC_MAGIC);
-      }
-      int format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_70, VERSION_CURRENT);
-      byte id[] = new byte[StringHelper.ID_LENGTH];
-      input.readBytes(id, 0, id.length);
-      CodecUtil.checkIndexHeaderSuffix(input, Long.toString(generation, Character.MAX_RADIX));
-
-      Version luceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
-      int indexCreatedVersion = input.readVInt();
-      if (luceneVersion.major < indexCreatedVersion) {
-        throw new CorruptIndexException("Creation version [" + indexCreatedVersion
-            + ".x] can't be greater than the version that wrote the segment infos: [" + luceneVersion + "]" , input);
-      }
 
-      if (indexCreatedVersion < Version.LATEST.major - 1) {
-        throw new IndexFormatTooOldException(input, "This index was initially created with Lucene "
-            + indexCreatedVersion + ".x while the current version is " + Version.LATEST
-            + " and Lucene only supports reading the current and previous major versions.");
-      }
+    // NOTE: as long as we want to throw indexformattooold (vs corruptindexexception), we need
+    // to read the magic ourselves.
+    int magic = input.readInt();
+    if (magic != CodecUtil.CODEC_MAGIC) {
+      throw new IndexFormatTooOldException(input, magic, CodecUtil.CODEC_MAGIC, CodecUtil.CODEC_MAGIC);
+    }
+    int format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_70, VERSION_CURRENT);
+    byte id[] = new byte[StringHelper.ID_LENGTH];
+    input.readBytes(id, 0, id.length);
+    CodecUtil.checkIndexHeaderSuffix(input, Long.toString(generation, Character.MAX_RADIX));
+
+    Version luceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
+    int indexCreatedVersion = input.readVInt();
+    if (luceneVersion.major < indexCreatedVersion) {
+      throw new CorruptIndexException("Creation version [" + indexCreatedVersion
+          + ".x] can't be greater than the version that wrote the segment infos: [" + luceneVersion + "]" , input);
+    }
 
-      SegmentInfos infos = new SegmentInfos(indexCreatedVersion);
-      infos.id = id;
-      infos.generation = generation;
-      infos.lastGeneration = generation;
-      infos.luceneVersion = luceneVersion;
+    if (indexCreatedVersion < Version.LATEST.major - 1) {
+      throw new IndexFormatTooOldException(input, "This index was initially created with Lucene "
+          + indexCreatedVersion + ".x while the current version is " + Version.LATEST
+          + " and Lucene only supports reading the current and previous major versions.");
+    }
 
-      infos.version = input.readLong();
-      //System.out.println("READ sis version=" + infos.version);
-      if (format > VERSION_70) {
-        infos.counter = input.readVLong();
-      } else {
-        infos.counter = input.readInt();
+    SegmentInfos infos = new SegmentInfos(indexCreatedVersion);
+    infos.id = id;
+    infos.generation = generation;
+    infos.lastGeneration = generation;
+    infos.luceneVersion = luceneVersion;
+
+    infos.version = input.readLong();
+    //System.out.println("READ sis version=" + infos.version);
+    if (format > VERSION_70) {
+      infos.counter = input.readVLong();
+    } else {
+      infos.counter = input.readInt();
+    }
+    int numSegments = input.readInt();
+    if (numSegments < 0) {
+      throw new CorruptIndexException("invalid segment count: " + numSegments, input);
+    }
+
+    if (numSegments > 0) {
+      infos.minSegmentLuceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
+    } else {
+      // else leave as null: no segments
+    }
+
+    long totalDocs = 0;
+    for (int seg = 0; seg < numSegments; seg++) {
+      String segName = input.readString();
+      byte[] segmentID = new byte[StringHelper.ID_LENGTH];
+      input.readBytes(segmentID, 0, segmentID.length);
+      Codec codec = readCodec(input);
+      SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ);
+      info.setCodec(codec);
+      totalDocs += info.maxDoc();
+      long delGen = input.readLong();
+      int delCount = input.readInt();
+      if (delCount < 0 || delCount > info.maxDoc()) {
+        throw new CorruptIndexException("invalid deletion count: " + delCount + " vs maxDoc=" + info.maxDoc(), input);
       }
-      int numSegments = input.readInt();
-      if (numSegments < 0) {
-        throw new CorruptIndexException("invalid segment count: " + numSegments, input);
+      long fieldInfosGen = input.readLong();
+      long dvGen = input.readLong();
+      int softDelCount = format > VERSION_72 ? input.readInt() : 0;
+      if (softDelCount < 0 || softDelCount > info.maxDoc()) {
+        throw new CorruptIndexException("invalid deletion count: " + softDelCount + " vs maxDoc=" + info.maxDoc(), input);
       }
-
-      if (numSegments > 0) {
-        infos.minSegmentLuceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
-      } else {
-        // else leave as null: no segments
+      if (softDelCount + delCount > info.maxDoc()) {
+        throw new CorruptIndexException("invalid deletion count: " + softDelCount + delCount + " vs maxDoc=" + info.maxDoc(), input);
       }
-
-      long totalDocs = 0;
-      for (int seg = 0; seg < numSegments; seg++) {
-        String segName = input.readString();
-        byte[] segmentID = new byte[StringHelper.ID_LENGTH];
-        input.readBytes(segmentID, 0, segmentID.length);
-        Codec codec = readCodec(input);
-        SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ);
-        info.setCodec(codec);
-        totalDocs += info.maxDoc();
-        long delGen = input.readLong();
-        int delCount = input.readInt();
-        if (delCount < 0 || delCount > info.maxDoc()) {
-          throw new CorruptIndexException("invalid deletion count: " + delCount + " vs maxDoc=" + info.maxDoc(), input);
-        }
-        long fieldInfosGen = input.readLong();
-        long dvGen = input.readLong();
-        int softDelCount = format > VERSION_72 ? input.readInt() : 0;
-        if (softDelCount < 0 || softDelCount > info.maxDoc()) {
-          throw new CorruptIndexException("invalid deletion count: " + softDelCount + " vs maxDoc=" + info.maxDoc(), input);
+      final byte[] sciId;
+      if (format > VERSION_74) {
+        byte marker = input.readByte();
+        switch (marker) {
+          case 1:
+            sciId = new byte[StringHelper.ID_LENGTH];
+            input.readBytes(sciId, 0, sciId.length);
+            break;
+          case 0:
+            sciId = null;
+            break;
+          default:
+            throw new CorruptIndexException("invalid SegmentCommitInfo ID marker: " + marker, input);
         }
-        if (softDelCount + delCount > info.maxDoc()) {
-          throw new CorruptIndexException("invalid deletion count: " + softDelCount + delCount + " vs maxDoc=" + info.maxDoc(), input);
-        }
-        final byte[] sciId;
-        if (format > VERSION_74) {
-          byte marker = input.readByte();
-          switch (marker) {
-            case 1:
-              sciId = new byte[StringHelper.ID_LENGTH];
-              input.readBytes(sciId, 0, sciId.length);
-              break;
-            case 0:
-              sciId = null;
-              break;
-            default:
-              throw new CorruptIndexException("invalid SegmentCommitInfo ID marker: " + marker, input);
-          }
-        } else {
-          sciId = null;
-        }
-        SegmentCommitInfo siPerCommit = new SegmentCommitInfo(info, delCount, softDelCount, delGen, fieldInfosGen, dvGen, sciId);
-        siPerCommit.setFieldInfosFiles(input.readSetOfStrings());
-        final Map<Integer,Set<String>> dvUpdateFiles;
-        final int numDVFields = input.readInt();
-        if (numDVFields == 0) {
-          dvUpdateFiles = Collections.emptyMap();
-        } else {
-          Map<Integer,Set<String>> map = new HashMap<>(numDVFields);
-          for (int i = 0; i < numDVFields; i++) {
-            map.put(input.readInt(), input.readSetOfStrings());
-          }
-          dvUpdateFiles = Collections.unmodifiableMap(map);
+      } else {
+        sciId = null;
+      }
+      SegmentCommitInfo siPerCommit = new SegmentCommitInfo(info, delCount, softDelCount, delGen, fieldInfosGen, dvGen, sciId);
+      siPerCommit.setFieldInfosFiles(input.readSetOfStrings());
+      final Map<Integer,Set<String>> dvUpdateFiles;
+      final int numDVFields = input.readInt();
+      if (numDVFields == 0) {
+        dvUpdateFiles = Collections.emptyMap();
+      } else {
+        Map<Integer,Set<String>> map = new HashMap<>(numDVFields);
+        for (int i = 0; i < numDVFields; i++) {
+          map.put(input.readInt(), input.readSetOfStrings());
         }
-        siPerCommit.setDocValuesUpdatesFiles(dvUpdateFiles);
-        infos.add(siPerCommit);
+        dvUpdateFiles = Collections.unmodifiableMap(map);
+      }
+      siPerCommit.setDocValuesUpdatesFiles(dvUpdateFiles);
+      infos.add(siPerCommit);
 
-        Version segmentVersion = info.getVersion();
+      Version segmentVersion = info.getVersion();
 
-        if (segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) {
-          throw new CorruptIndexException("segments file recorded minSegmentLuceneVersion=" + infos.minSegmentLuceneVersion + " but segment=" + info + " has older version=" + segmentVersion, input);
-        }
+      if (segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) {
+        throw new CorruptIndexException("segments file recorded minSegmentLuceneVersion=" + infos.minSegmentLuceneVersion + " but segment=" + info + " has older version=" + segmentVersion, input);
+      }
 
-        if (infos.indexCreatedVersionMajor >= 7 && segmentVersion.major < infos.indexCreatedVersionMajor) {
-          throw new CorruptIndexException("segments file recorded indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor + " but segment=" + info + " has older version=" + segmentVersion, input);
-        }
+      if (infos.indexCreatedVersionMajor >= 7 && segmentVersion.major < infos.indexCreatedVersionMajor) {
+        throw new CorruptIndexException("segments file recorded indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor + " but segment=" + info + " has older version=" + segmentVersion, input);
+      }
 
-        if (infos.indexCreatedVersionMajor >= 7 && info.getMinVersion() == null) {
-          throw new CorruptIndexException("segments infos must record minVersion with indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor, input);
-        }
+      if (infos.indexCreatedVersionMajor >= 7 && info.getMinVersion() == null) {
+        throw new CorruptIndexException("segments infos must record minVersion with indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor, input);
       }
+    }
 
-      infos.userData = input.readMapOfStrings();
+    infos.userData = input.readMapOfStrings();
 
-      // LUCENE-6299: check we are in bounds
-      if (totalDocs > IndexWriter.getActualMaxDocs()) {
-        throw new CorruptIndexException("Too many documents: an index cannot exceed " + IndexWriter.getActualMaxDocs() + " but readers have total maxDoc=" + totalDocs, input);
-      }
+    CodecUtil.checkFooter(input);
 
-      return infos;
-    } catch (Throwable t) {
-      priorE = t;
-    } finally {
-      CodecUtil.checkFooter(input, priorE);
+    // LUCENE-6299: check we are in bounds
+    if (totalDocs > IndexWriter.getActualMaxDocs()) {
+      throw new CorruptIndexException("Too many documents: an index cannot exceed " + IndexWriter.getActualMaxDocs() + " but readers have total maxDoc=" + totalDocs, input);
     }
-    throw new Error("Unreachable code");
+
+    return infos;
   }
 
   private static Codec readCodec(DataInput input) throws IOException {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
index 23c98ad..19d8214 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
@@ -18,16 +18,12 @@ package org.apache.lucene.index;
 
 
 import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.StringHelper;
-import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;
 
 import java.io.IOException;
@@ -182,62 +178,5 @@ public class TestSegmentInfos extends LuceneTestCase {
       assertEquals("clone changed but shouldn't", StringHelper.idToString(id), StringHelper.idToString(clone.getId()));
     }
   }
-
-  public void testBitFlippedTriggersCorruptIndexException() throws IOException {
-    BaseDirectoryWrapper dir = newDirectory();
-    dir.setCheckIndexOnClose(false);
-    byte id[] = StringHelper.randomId();
-    Codec codec = Codec.getDefault();
-
-    SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
-    SegmentInfo info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_0", 1, false, Codec.getDefault(),
-                                       Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
-    info.setFiles(Collections.<String>emptySet());
-    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
-    SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, 0, -1, -1, -1, StringHelper.randomId());
-    sis.add(commitInfo);
-
-    info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_1", 1, false, Codec.getDefault(),
-                           Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
-    info.setFiles(Collections.<String>emptySet());
-    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
-    commitInfo = new SegmentCommitInfo(info, 0, 0,-1, -1, -1, StringHelper.randomId());
-    sis.add(commitInfo);
-
-    sis.commit(dir);
-
-    BaseDirectoryWrapper corruptDir = newDirectory();
-    corruptDir.setCheckIndexOnClose(false);
-    boolean corrupt = false;
-    for (String file : dir.listAll()) {
-      if (file.startsWith(IndexFileNames.SEGMENTS)) {
-        try (IndexInput in = dir.openInput(file, IOContext.DEFAULT);
-            IndexOutput out = corruptDir.createOutput(file, IOContext.DEFAULT)) {
-          final long corruptIndex = TestUtil.nextLong(random(), 0, in.length() - 1);
-          out.copyBytes(in, corruptIndex);
-          final int b = Byte.toUnsignedInt(in.readByte()) + TestUtil.nextInt(random(), 0x01, 0xff);
-          out.writeByte((byte) b);
-          out.copyBytes(in, in.length() - in.getFilePointer());
-        }
-        try (IndexInput in = corruptDir.openInput(file, IOContext.DEFAULT)) {
-          CodecUtil.checksumEntireFile(in);
-          if (VERBOSE) {
-            System.out.println("TEST: Altering the file did not update the checksum, aborting...");
-          }
-          return;
-        } catch (CorruptIndexException e) {
-          // ok
-        }
-        corrupt = true;
-      } else if (slowFileExists(corruptDir, file) == false) { // extraFS
-        corruptDir.copyFrom(dir, file, file, IOContext.DEFAULT);
-      }
-    }
-    assertTrue("No segments file found", corrupt);
-
-    expectThrows(CorruptIndexException.class, () -> SegmentInfos.readLatestCommit(corruptDir));
-    dir.close();
-    corruptDir.close();
-  }
 }
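
For orientation, the shape being removed by this revert wrapped the whole of readCommit in a try/finally so that CodecUtil.checkFooter(input, priorE) always ran, folding both parse failures and checksum mismatches into the exception thrown to the caller. A stripped-down sketch of that control flow, with the body elided:

Throwable priorE = null;
try {
  // ... read header, per-segment entries, and user data ...
  return infos;
} catch (Throwable t) {
  // Remember the parse failure so checkFooter can report it alongside the checksum state.
  priorE = t;
} finally {
  // Runs on both success and failure; verifies the checksum and rethrows the prior
  // failure (or a CorruptIndexException) when something went wrong.
  CodecUtil.checkFooter(input, priorE);
}
throw new Error("Unreachable code");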
 


[lucene-solr] 16/47: SOLR-13289: Add Refguide changes (#1501)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 631f4a9bfaa48e7a674aff99112b2c4e823de837
Author: Tomas Fernandez Lobbe <tf...@apache.org>
AuthorDate: Thu May 21 16:55:49 2020 -0700

    SOLR-13289: Add Refguide changes (#1501)
---
 .../src/common-query-parameters.adoc               | 40 ++++++++++++++++++++++
 1 file changed, 40 insertions(+)

diff --git a/solr/solr-ref-guide/src/common-query-parameters.adoc b/solr/solr-ref-guide/src/common-query-parameters.adoc
index 1cfd391..594f513 100644
--- a/solr/solr-ref-guide/src/common-query-parameters.adoc
+++ b/solr/solr-ref-guide/src/common-query-parameters.adoc
@@ -361,3 +361,43 @@ This is what happens if a similar request is sent that adds `echoParams=all` to
   }
 }
 ----
+
+== minExactCount Parameter
+When this parameter is used, Solr will count the number of hits accurately at least up to this value. Beyond that, Solr can skip over documents whose score is not high enough to enter the top N. This can greatly improve the performance of search queries. On the other hand, when this parameter is used, `numFound` may not be exact, and may instead be an approximation.
+The `numFoundExact` boolean attribute is included in all responses, indicating whether the `numFound` value is exact or an approximation. If it is an approximation, the real number of hits for the query is guaranteed to be greater than or equal to `numFound`.
+
+More about approximate document counting and `minExactCount`:
+
+* The documents returned in the response are guaranteed to be the docs with the top scores. This parameter will not make Solr skip documents that are to be returned in the response; it only allows Solr to skip counting docs that match the query but whose score is too low to be in the top N.
+* Providing `minExactCount` doesn't guarantee that Solr will use approximate hit counting (and thus provide the speedup). Some types of queries, or other parameters (for example, when facets are requested), require accurate counting. The value of `numFoundExact` indicates whether the approximation was used.
+* Approximate counting can only be used when sorting by `score desc` first (which is the default sort in Solr). Other fields can be used after `score desc`, but if any other type of sorting is used before score, the approximation won't be applied.
+* When doing distributed queries across multiple shards, each shard counts hits accurately up to `minExactCount` (which means the query could be hitting `numShards * minExactCount` docs and `numFound` in the response would still be accurate).
+For example:
+
+[source,text]
+q=quick brown fox&minExactCount=100&rows=10
+
+[source,json]
+----
+"response": {
+    "numFound": 153,
+    "start": 0,
+    "numFoundExact": false,
+    "docs": Array[10]
+...
+----
+Since `numFoundExact=false`, we know the number of documents matching the query is greater or equal to 153. If we specify a higher value for `minExactCount`:
+
+[source,text]
+q=quick brown fox&minExactCount=200&rows=10
+
+[source,json]
+----
+"response": {
+    "numFound": 163,
+    "start": 0,
+    "numFoundExact": true,
+    "docs": Array[10]
+...
+----
+In this case we know that `163` is the exact number of hits for the query. Both queries must have returned the same number of documents in the top 10.
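
The adoc examples above use raw query parameters; from SolrJ the parameter is set the same way on a SolrQuery. A minimal sketch (the collection name and URL are placeholders, and it assumes the SolrDocumentList.getNumFoundExact() accessor introduced alongside this feature):

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocumentList;

public class MinExactCountExample {
  public static void main(String[] args) throws Exception {
    // "techproducts" and the base URL are just example values.
    try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr").build()) {
      SolrQuery q = new SolrQuery("quick brown fox");
      q.setRows(10);
      q.set("minExactCount", 100); // count accurately only up to 100 hits
      QueryResponse rsp = client.query("techproducts", q);
      SolrDocumentList docs = rsp.getResults();
      // numFound may be approximate; numFoundExact says whether it is exact.
      System.out.println("numFound=" + docs.getNumFound()
          + " numFoundExact=" + docs.getNumFoundExact());
    }
  }
}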


[lucene-solr] 10/47: SOLR-14492: Fix ArrayIndexOutOfBoundsException in json.facet 'terms' when FacetFieldProcessorByHashDV is used with aggregations over multivalued numeric fields

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 46b617e9334e49577779103afdb9b53525d39b46
Author: Chris Hostetter <ho...@apache.org>
AuthorDate: Wed May 20 11:16:05 2020 -0700

    SOLR-14492: Fix ArrayIndexOutOfBoundsException in json.facet 'terms' when FacetFieldProcessorByHashDV is used with aggregations over multivalued numeric fields
    
    SOLR-14477: Fix incorrect 'relatedness()' calculations in json.facet 'terms' when 'prefix' option is used
---
 solr/CHANGES.txt                                   |   6 +
 .../java/org/apache/solr/search/facet/AvgAgg.java  |   6 +-
 .../org/apache/solr/search/facet/CountValsAgg.java |   2 +-
 .../org/apache/solr/search/facet/DocValuesAcc.java |  16 +-
 .../search/facet/FacetFieldProcessorByArray.java   |   2 +-
 .../org/apache/solr/search/facet/MinMaxAgg.java    |   4 +-
 .../solr/search/facet/UnInvertedFieldAcc.java      |   6 +-
 .../solr/search/facet/TestCloudJSONFacetSKG.java   | 278 ++++--
 .../search/facet/TestCloudJSONFacetSKGEquiv.java   | 989 +++++++++++++++++++++
 .../apache/solr/search/facet/TestJsonFacets.java   | 105 ++-
 10 files changed, 1296 insertions(+), 118 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 7105d5c..e2dc073 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -186,6 +186,12 @@ Bug Fixes
 * SOLR-8394: /admin/luke was always showing 0 for indexHeapUsageBytes. It should work now.
   (Steve Molloy, Isabelle Giguere, David Smiley)
 
+* SOLR-14492: Fix ArrayIndexOutOfBoundsException in json.facet 'terms' when FacetFieldProcessorByHashDV is
+  used with aggregations over multivalued numeric fields (hossman)
+
+* SOLR-14477: Fix incorrect 'relatedness()' calculations in json.facet 'terms' when 'prefix' option is used
+  (hossman)
+
 Other Changes
 ---------------------
 * SOLR-14197: SolrResourceLoader: marked many methods as deprecated, and in some cases rerouted exiting logic to avoid
diff --git a/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java b/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java
index e22192b..e1a09a6 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/AvgAgg.java
@@ -134,7 +134,7 @@ public class AvgAgg extends SimpleAggValueSource {
     @Override
     public void resize(Resizer resizer) {
       super.resize(resizer);
-      resizer.resize(counts, 0);
+      this.counts = resizer.resize(counts, 0);
     }
   }
 
@@ -188,7 +188,7 @@ public class AvgAgg extends SimpleAggValueSource {
     @Override
     public void resize(Resizer resizer) {
       super.resize(resizer);
-      resizer.resize(counts, 0);
+      this.counts = resizer.resize(counts, 0);
     }
   }
 
@@ -244,7 +244,7 @@ public class AvgAgg extends SimpleAggValueSource {
     @Override
     public void resize(Resizer resizer) {
       super.resize(resizer);
-      resizer.resize(counts, 0);
+      this.counts = resizer.resize(counts, 0);
     }
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java b/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java
index 8735c0f..4923cc8 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/CountValsAgg.java
@@ -140,7 +140,7 @@ public class CountValsAgg extends SimpleAggValueSource {
 
     @Override
     public void resize(Resizer resizer) {
-      resizer.resize(result, 0);
+      this.result = resizer.resize(result, 0);
     }
 
     @Override
diff --git a/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java b/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java
index 1603a50..38c9f08 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/DocValuesAcc.java
@@ -134,7 +134,7 @@ abstract class LongSortedNumericDVAcc extends SortedNumericDVAcc {
 
   @Override
   public void resize(Resizer resizer) {
-    resizer.resize(result, initialValue);
+    this.result = resizer.resize(result, initialValue);
   }
 
 }
@@ -169,7 +169,7 @@ abstract class DoubleSortedNumericDVAcc extends SortedNumericDVAcc {
 
   @Override
   public void resize(Resizer resizer) {
-    resizer.resize(result, initialValue);
+    this.result = resizer.resize(result, initialValue);
   }
 
   /**
@@ -246,8 +246,8 @@ abstract class SDVSortedNumericAcc extends DoubleSortedNumericDVAcc {
   @Override
   public void resize(Resizer resizer) {
     super.resize(resizer);
-    resizer.resize(counts, 0);
-    resizer.resize(sum, 0);
+    this.counts = resizer.resize(counts, 0);
+    this.sum = resizer.resize(sum, 0);
   }
 }
 
@@ -325,7 +325,7 @@ abstract class LongSortedSetDVAcc extends SortedSetDVAcc {
 
   @Override
   public void resize(Resizer resizer) {
-    resizer.resize(result, initialValue);
+    this.result = resizer.resize(result, initialValue);
   }
 }
 
@@ -359,7 +359,7 @@ abstract class DoubleSortedSetDVAcc extends SortedSetDVAcc {
 
   @Override
   public void resize(Resizer resizer) {
-    resizer.resize(result, initialValue);
+    this.result = resizer.resize(result, initialValue);
   }
 }
 
@@ -419,7 +419,7 @@ abstract class SDVSortedSetAcc extends DoubleSortedSetDVAcc {
   @Override
   public void resize(Resizer resizer) {
     super.resize(resizer);
-    resizer.resize(counts, 0);
-    resizer.resize(sum, 0);
+    this.counts = resizer.resize(counts, 0);
+    this.sum = resizer.resize(sum, 0);
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
index b60ec11..a018a87 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByArray.java
@@ -135,7 +135,7 @@ abstract class FacetFieldProcessorByArray extends FacetFieldProcessor {
    */
   public IntFunction<SlotContext> slotContext = (slotNum) -> {
     try {
-      Object value = sf.getType().toObject(sf, lookupOrd(slotNum));
+      Object value = sf.getType().toObject(sf, lookupOrd(slotNum + startTermIndex));
       Query q = makeBucketQuery(valueObjToString(value));
       assert null != q : "null query for: '" + value + "'";
       return new SlotContext(q);
diff --git a/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java b/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java
index 0ff3ea7..a9c6b44 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/MinMaxAgg.java
@@ -221,7 +221,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
 
     @Override
     public void resize(Resizer resizer) {
-      resizer.resize(result, MISSING);
+      this.result = resizer.resize(result, MISSING);
     }
 
     @Override
@@ -504,7 +504,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
 
     @Override
     public void resize(Resizer resizer) {
-      resizer.resize(slotOrd, MISSING);
+      this.slotOrd = resizer.resize(slotOrd, MISSING);
     }
 
     @Override
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java
index 8fab341..5f662d7 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UnInvertedFieldAcc.java
@@ -91,7 +91,7 @@ abstract class DoubleUnInvertedFieldAcc extends UnInvertedFieldAcc {
 
   @Override
   public void resize(Resizer resizer) {
-    resizer.resize(result, initialValue);
+    this.result = resizer.resize(result, initialValue);
   }
 }
 
@@ -153,7 +153,7 @@ abstract class SDVUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc {
   @Override
   public void resize(Resizer resizer) {
     super.resize(resizer);
-    resizer.resize(counts, 0);
-    resizer.resize(sum, 0);
+    this.counts = resizer.resize(counts, 0);
+    this.sum = resizer.resize(sum, 0);
   }
 }
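
A recurring fix in the accumulator hunks above is assigning the value returned by resize: the resizer may allocate a new, larger array, so calling resize without keeping the result leaves the accumulator reading and writing the old, undersized buffer. A toy illustration of that bug class, using a hypothetical resize helper rather than the actual SlotAcc.Resizer API:

import java.util.Arrays;

public class ResizeReturnValue {
  // Hypothetical stand-in for a resize helper: it may allocate and return a NEW array.
  static long[] resize(long[] old, int newSize, long defaultValue) {
    long[] fresh = new long[newSize];
    Arrays.fill(fresh, defaultValue);
    System.arraycopy(old, 0, fresh, 0, Math.min(old.length, newSize));
    return fresh;
  }

  static long[] counts = new long[4];

  public static void main(String[] args) {
    resize(counts, 16, 0);              // BUG: return value ignored, counts is still length 4
    counts = resize(counts, 16, 0);     // FIX: keep the re-allocated array, as in the patch
    System.out.println(counts.length);  // 16
  }
}
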
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java
index 32f3708..75e9611 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKG.java
@@ -46,6 +46,10 @@ import org.apache.solr.common.util.NamedList;
 import static org.apache.solr.search.facet.RelatednessAgg.computeRelatedness;
 import static org.apache.solr.search.facet.RelatednessAgg.roundTo5Digits;
 
+import org.noggit.JSONUtil;
+import org.noggit.JSONWriter;
+import org.noggit.JSONWriter.Writable;
+
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.slf4j.Logger;
@@ -73,6 +77,7 @@ import org.slf4j.LoggerFactory;
  * </p>
  * 
  * @see TestCloudJSONFacetJoinDomain
+ * @see TestCloudJSONFacetSKGEquiv
  */
 @Slow
 public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
@@ -87,9 +92,18 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
   private static final int UNIQUE_FIELD_VALS = 50;
 
   /** Multivalued string field suffixes that can be randomized for testing diff facet/join code paths */
-  private static final String[] STR_FIELD_SUFFIXES = new String[] { "_ss", "_sds", "_sdsS" };
+  private static final String[] MULTI_STR_FIELD_SUFFIXES = new String[]
+    { "_multi_ss", "_multi_sds", "_multi_sdsS" };
   /** Multivalued int field suffixes that can be randomized for testing diff facet/join code paths */
-  private static final String[] INT_FIELD_SUFFIXES = new String[] { "_is", "_ids", "_idsS" };
+  private static final String[] MULTI_INT_FIELD_SUFFIXES = new String[]
+    { "_multi_is", "_multi_ids", "_multi_idsS" };
+  
+  /** Single Valued string field suffixes that can be randomized for testing diff facet code paths */
+  private static final String[] SOLO_STR_FIELD_SUFFIXES = new String[]
+    { "_solo_s", "_solo_sd", "_solo_sdS" };
+  /** Single Valued int field suffixes that can be randomized for testing diff facet code paths */
+  private static final String[] SOLO_INT_FIELD_SUFFIXES = new String[]
+    { "_solo_i", "_solo_id", "_solo_idS" };
 
   /** A basic client for operations at the cloud level, default collection will be set */
   private static CloudSolrClient CLOUD_CLIENT;
@@ -100,7 +114,10 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
   private static void createMiniSolrCloudCluster() throws Exception {
     // sanity check constants
     assertTrue("bad test constants: some suffixes will never be tested",
-               (STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) && (INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM));
+               (MULTI_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) &&
+               (MULTI_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM) &&
+               (SOLO_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) &&
+               (SOLO_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM));
     
     // we need DVs on point fields to compute stats & facets
     if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true");
@@ -152,9 +169,14 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
         for (int v = 0; v < numValsThisDoc; v++) {
           final String fieldValue = randFieldValue(fieldNum);
           
-          // for each fieldNum, there are actaully two fields: one string, and one integer
-          doc.addField(field(STR_FIELD_SUFFIXES, fieldNum), fieldValue);
-          doc.addField(field(INT_FIELD_SUFFIXES, fieldNum), fieldValue);
+          // multi valued: one string, and one integer
+          doc.addField(multiStrField(fieldNum), fieldValue);
+          doc.addField(multiIntField(fieldNum), fieldValue);
+        }
+        { // single valued: one string, and one integer
+          final String fieldValue = randFieldValue(fieldNum);
+          doc.addField(soloStrField(fieldNum), fieldValue);
+          doc.addField(soloIntField(fieldNum), fieldValue);
         }
       }
       CLOUD_CLIENT.add(doc);
@@ -172,8 +194,8 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
    * Given a (random) number, and a (static) array of possible suffixes returns a consistent field name that 
    * uses that number and one of hte specified suffixes in it's name.
    *
-   * @see #STR_FIELD_SUFFIXES
-   * @see #INT_FIELD_SUFFIXES
+   * @see #MULTI_STR_FIELD_SUFFIXES
+   * @see #MULTI_INT_FIELD_SUFFIXES
    * @see #MAX_FIELD_NUM
    * @see #randFieldValue
    */
@@ -183,11 +205,21 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
     final String suffix = suffixes[fieldNum % suffixes.length];
     return "field_" + fieldNum + suffix;
   }
-  private static String strfield(final int fieldNum) {
-    return field(STR_FIELD_SUFFIXES, fieldNum);
+  /** Given a (random) number, returns a consistent field name for a multi valued string field */
+  private static String multiStrField(final int fieldNum) {
+    return field(MULTI_STR_FIELD_SUFFIXES, fieldNum);
+  }
+  /** Given a (random) number, returns a consistent field name for a multi valued int field */
+  private static String multiIntField(final int fieldNum) {
+    return field(MULTI_INT_FIELD_SUFFIXES, fieldNum);
+  }
+  /** Given a (random) number, returns a consistent field name for a single valued string field */
+  private static String soloStrField(final int fieldNum) {
+    return field(SOLO_STR_FIELD_SUFFIXES, fieldNum);
   }
-  private static String intfield(final int fieldNum) {
-    return field(INT_FIELD_SUFFIXES, fieldNum);
+  /** Given a (random) number, returns a consistent field name for a single valued int field */
+  private static String soloIntField(final int fieldNum) {
+    return field(SOLO_INT_FIELD_SUFFIXES, fieldNum);
   }
 
   /**
@@ -224,19 +256,19 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
   public void testBespoke() throws Exception {
     { // trivial single level facet
       Map<String,TermFacet> facets = new LinkedHashMap<>();
-      TermFacet top = new TermFacet(strfield(9), UNIQUE_FIELD_VALS, 0, null);
+      TermFacet top = new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, null);
       facets.put("top1", top);
       final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS);
-      assertFacetSKGsAreCorrect(maxBuckets, facets, strfield(7)+":11", strfield(5)+":9", "*:*");
+      assertFacetSKGsAreCorrect(maxBuckets, facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
       assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS);
     }
     
     { // trivial single level facet w/sorting on skg
       Map<String,TermFacet> facets = new LinkedHashMap<>();
-      TermFacet top = new TermFacet(strfield(9), UNIQUE_FIELD_VALS, 0, "skg desc");
+      TermFacet top = new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, "skg desc");
       facets.put("top2", top);
       final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS);
-      assertFacetSKGsAreCorrect(maxBuckets, facets, strfield(7)+":11", strfield(5)+":9", "*:*");
+      assertFacetSKGsAreCorrect(maxBuckets, facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
       assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS);
     }
 
@@ -249,9 +281,9 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
       // because it causes FacetField.returnsPartial() to be "true"
       for (int limit : new int[] { 999999999, -1 }) {
         Map<String,TermFacet> facets = new LinkedHashMap<>();
-        facets.put("top_facet_limit__" + limit, new TermFacet(strfield(9), limit, 0, "skg desc"));
+        facets.put("top_facet_limit__" + limit, new TermFacet(multiStrField(9), limit, 0, "skg desc"));
         final AtomicInteger maxBuckets = new AtomicInteger(UNIQUE_FIELD_VALS);
-        assertFacetSKGsAreCorrect(maxBuckets, facets, strfield(7)+":11", strfield(5)+":9", "*:*");
+        assertFacetSKGsAreCorrect(maxBuckets, facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
         assertTrue("Didn't check a single bucket???", maxBuckets.get() < UNIQUE_FIELD_VALS);
       }
     }
@@ -294,7 +326,7 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
     for (int c = 0; c < numClauses; c++) {
       final int fieldNum = random().nextInt(MAX_FIELD_NUM);
       // keep queries simple, just use str fields - not point of test
-      clauses[c] = strfield(fieldNum) + ":" + randFieldValue(fieldNum);
+      clauses[c] = multiStrField(fieldNum) + ":" + randFieldValue(fieldNum);
     }
     return buildORQuery(clauses);
   }
@@ -319,7 +351,7 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
     final SolrParams baseParams = params("rows","0", "fore", foreQ, "back", backQ);
     
     final SolrParams facetParams = params("q", query,
-                                          "json.facet", ""+TermFacet.toJSONFacetParamValue(expected,null));
+                                          "json.facet", ""+TermFacet.toJSONFacetParamValue(expected));
     final SolrParams initParams = SolrParams.wrapAppended(facetParams, baseParams);
     
     log.info("Doing full run: {}", initParams);
@@ -367,6 +399,7 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
     for (Map.Entry<String,TermFacet> entry : expected.entrySet()) {
       final String facetKey = entry.getKey();
       final TermFacet facet = entry.getValue();
+      
       final NamedList results = (NamedList) actualFacetResponse.get(facetKey);
       assertNotNull(facetKey + " key missing from: " + actualFacetResponse, results);
       final List<NamedList> buckets = (List<NamedList>) results.get("buckets");
@@ -464,74 +497,78 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
                  skgBucket.get("relatedness"));
     
   }
-  
-  
+
   /**
    * Trivial data structure for modeling a simple terms facet that can be written out as a json.facet param.
    *
    * Doesn't do any string escaping or quoting, so don't use whitespace or reserved json characters
    */
-  private static final class TermFacet {
-    public final String field;
+  private static final class TermFacet implements Writable {
+
+    /** non-skg subfacets for use in verification */
     public final Map<String,TermFacet> subFacets = new LinkedHashMap<>();
-    public final Integer limit; // may be null
-    public final Integer overrequest; // may be null
-    public final String sort; // may be null
+    
+    private final Map<String,Object> jsonData = new LinkedHashMap<>();
+
+    public final String field;
+    /** 
+     * @param field must be non null
+     * @param options can set any of the options used in a term facet other than field or (sub) facets
+     */
+    public TermFacet(final String field, final Map<String,Object> options) {
+      assert null != field;
+      this.field = field;
+      
+      jsonData.putAll(options);
+      
+      // we don't allow these to be overridden by options, so set them now...
+      jsonData.put("type", "terms");
+      jsonData.put("field", field);
+      // see class javadocs for why we always use refine:true & the query:'*:*' domain for this test.
+      jsonData.put("refine", true);
+      jsonData.put("domain", map("query","*:*"));
+      
+    }
+
+    /** all params except field can be null */
+    public TermFacet(String field, Integer limit, Integer overrequest, String sort) {
+      this(field, map("limit", limit, "overrequest", overrequest, "sort", sort));
+    }
+    
     /** Simplified constructor asks for limit = # unique vals */
     public TermFacet(String field) {
       this(field, UNIQUE_FIELD_VALS, 0, "skg desc"); 
       
     }
-    public TermFacet(String field, Integer limit, Integer overrequest, String sort) {
-      assert null != field;
-      this.field = field;
-      this.limit = limit;
-      this.overrequest = overrequest;
-      this.sort = sort;
+    @Override
+    public void write(JSONWriter writer) {
+      // we need to include both our "real" subfacets, along with our SKG stat and 'processEmpty'
+      // (we don't put these in 'subFacets' to help keep the verification code simpler)
+      final Map<String,Object> sub = map("processEmpty", true,
+                                         "skg", "relatedness($fore,$back)");
+      sub.putAll(subFacets);
+      
+      final Map<String,Object> out = map("facet", sub);
+      out.putAll(jsonData);
+      
+      writer.write(out);
     }
 
     /**
-     * recursively generates the <code>json.facet</code> param value to use for testing this facet
-     */
-    private CharSequence toJSONFacetParamValue() {
-      final String limitStr = (null == limit) ? "" : (", limit:" + limit);
-      final String overrequestStr = (null == overrequest) ? "" : (", overrequest:" + overrequest);
-      final String sortStr = (null == sort) ? "" : (", sort: '" + sort + "'");
-      final StringBuilder sb
-        = new StringBuilder("{ type:terms, field:" + field + limitStr + overrequestStr + sortStr);
-
-      // see class javadocs for why we always use refine:true & the query:'*:*' domain for this test.
-      sb.append(", refine: true, domain: { query: '*:*' }, facet:");
-      sb.append(toJSONFacetParamValue(subFacets, "skg : 'relatedness($fore,$back)'"));
-      sb.append("}");
-      return sb;
-    }
-    
-    /**
      * Given a set of (possibly nested) facets, generates a suitable <code>json.facet</code> param value to 
      * use for testing them against in a solr request.
      */
-    public static CharSequence toJSONFacetParamValue(final Map<String,TermFacet> facets,
-                                                     final String extraJson) {
+    public static String toJSONFacetParamValue(final Map<String,TermFacet> facets) {
       assert null != facets;
-      if (0 == facets.size() && null == extraJson) {
-        return "";
-      }
-
-      StringBuilder sb = new StringBuilder("{ processEmpty: true, ");
-      for (String key : facets.keySet()) {
-        sb.append(key).append(" : ").append(facets.get(key).toJSONFacetParamValue());
-        sb.append(" ,");
-      }
-      if (null == extraJson) {
-        sb.setLength(sb.length() - 1);
-      } else {
-        sb.append(extraJson);
-      }
-      sb.append("}");
-      return sb;
+      assert ! facets.isEmpty();
+      
+      // see class javadocs for why we always want processEmpty
+      final Map<String,Object> jsonData = map("processEmpty", true);
+      jsonData.putAll(facets);
+      
+      return JSONUtil.toJSON(jsonData, -1); // no newlines
     }
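+    // Illustrative sketch only (not part of the original test): for a single TermFacet keyed
+    // "facet_1" on a hypothetical field "foo_s", the generated param value looks roughly like
+    // (key order may vary):
+    //   { "processEmpty":true,
+    //     "facet_1":{ "facet":{"processEmpty":true,"skg":"relatedness($fore,$back)"},
+    //                 "type":"terms", "field":"foo_s", "refine":true, "domain":{"query":"*:*"} } }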
-    
+
     /**
      * Factory method for generating some random facets.  
      *
@@ -545,12 +582,83 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
       final int maxDepth = TestUtil.nextInt(random(), 0, (usually() ? 2 : 3));
       return buildRandomFacets(keyCounter, maxDepth);
     }
+    
+    /**
+     * picks a random field to facet on.
+     *
+     * @see #field
+     * @return field name, never null
+     */
+    public static String randomFacetField(final Random r) {
+      final int fieldNum = r.nextInt(MAX_FIELD_NUM);
+      switch(r.nextInt(4)) {
+        case 0: return multiStrField(fieldNum);
+        case 1: return multiIntField(fieldNum);
+        case 2: return soloStrField(fieldNum);
+        case 3: return soloIntField(fieldNum);
+        default: throw new RuntimeException("Broken case statement");
+      }
+    }
 
     /**
+     * picks a random value for the "perSeg" param, biased in favor of interesting test cases
+     *
+     * @return a Boolean, may be null
+     */
+    public static Boolean randomPerSegParam(final Random r) {
+
+      switch(r.nextInt(4)) {
+        case 0: return true;
+        case 1: return false;
+        case 2: 
+        case 3: return null;
+        default: throw new RuntimeException("Broken case statement");
+      }
+    }
+    
+    /**
+     * picks a random value for the "prefix" param, biased in favor of interesting test cases
+     *
+     * @return a valid prefix value, may be null
+     */
+    public static String randomPrefixParam(final Random r, final String facetField) {
+      
+      if (facetField.contains("multi_i") || facetField.contains("solo_i")) {
+        // never use a prefix on a numeric field
+        return null;
+      }
+      assert (facetField.contains("multi_s") || facetField.contains("solo_s"))
+        : "possible facet fields have changed, breaking test";
+      
+      switch(r.nextInt(5)) {
+        case 0: return "2";
+        case 1: return "3";
+        case 2: 
+        case 3: 
+        case 4: return null;
+        default: throw new RuntimeException("Broken case statement");
+      }
+    }
+    
+    /**
+     * picks a random value for the "prelim_sort" param, biased in favor of interesting test cases.  
+     *
+     * @return a sort string (w/direction), or null to specify nothing (trigger default behavior)
+     * @see #randomSortParam
+     */
+    public static String randomPrelimSortParam(final Random r, final String sort) {
+
+      if (null != sort && sort.startsWith("skg") && 1 == TestUtil.nextInt(random(), 0, 3)) {
+        return "count desc";
+      }
+      return null;
+    }
+    /**
      * picks a random value for the "sort" param, biased in favor of interesting test cases
      *
      * @return a sort string (w/direction), or null to specify nothing (trigger default behavior)
      * @see #randomLimitParam
+     * @see #randomPrelimSortParam
      */
     public static String randomSortParam(Random r) {
 
@@ -632,12 +740,17 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
         if (keyCounter.get() < 3) { // a hard limit on the total number of facets (regardless of depth) to reduce OOM risk
           
           final String sort = randomSortParam(random());
-          final Integer limit = randomLimitParam(random(), sort);
-          final Integer overrequest = randomOverrequestParam(random());
-          final TermFacet facet =  new TermFacet(field((random().nextBoolean()
-                                                        ? STR_FIELD_SUFFIXES : INT_FIELD_SUFFIXES),
-                                                       random().nextInt(MAX_FIELD_NUM)),
-                                                 limit, overrequest, sort);
+          final String facetField = randomFacetField(random());
+          final TermFacet facet =  new TermFacet(facetField,
+                                                 map("sort", sort,
+                                                     "prelim_sort", randomPrelimSortParam(random(), sort),
+                                                     "limit", randomLimitParam(random(), sort),
+                                                     "overrequest", randomOverrequestParam(random()),
+                                                     "prefix", randomPrefixParam(random(), facetField),
+                                                     "perSeg", randomPerSegParam(random())));
           results.put("facet_" + keyCounter.incrementAndGet(), facet);
           if (0 < maxDepth) {
             // if we're going wide, don't go deep
@@ -675,5 +788,20 @@ public class TestCloudJSONFacetSKG extends SolrCloudTestCase {
                                                         client.getZkStateReader(),
                                                         true, true, 330);
   }
+  
+  /** helper macro: fails on null keys, skips pairs with null values  */
+  public static Map<String,Object> map(Object... pairs) {
+    if (0 != pairs.length % 2) throw new IllegalArgumentException("uneven number of arguments");
+    final Map<String,Object> map = new LinkedHashMap<>();
+    for (int i = 0; i < pairs.length; i+=2) {
+      final Object key = pairs[i];
+      final Object val = pairs[i+1];
+      if (null == key) throw new NullPointerException("argument " + i);
+      if (null == val) continue;
+      
+      map.put(key.toString(), val);
+    }
+    return map;
+  }
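+  // e.g. (illustration of the map(...) helper above): map("limit", 10, "sort", null)
+  // yields {limit=10} -- the null-valued "sort" pair is skipped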
 
 }
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKGEquiv.java b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKGEquiv.java
new file mode 100644
index 0000000..5aa16e0
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestCloudJSONFacetSKGEquiv.java
@@ -0,0 +1,989 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search.facet;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Random;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.lucene.util.TestUtil;
+import org.apache.solr.BaseDistributedSearchTestCase;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.cloud.AbstractDistribZkTestBase;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
+import static org.apache.solr.search.facet.FacetField.FacetMethod;
+
+import org.noggit.JSONUtil;
+import org.noggit.JSONWriter;
+import org.noggit.JSONWriter.Writable;
+  
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/** 
+ * <p>
+ * A randomized test of nested facets using the <code>relatedness()</code> function, that asserts the 
+ * results are consistent and equivalent regardless of what <code>method</code> (ie: FacetFieldProcessor)
+ * is requested.
+ * </p>
+ * <p>
+ * This test is based on {@link TestCloudJSONFacetSKG} but does <em>not</em> 
+ * force <code>refine: true</code> nor specify a <code>domain: { 'query':'*:*' }</code> for every facet, 
+ * because this test does not attempt to prove the results with validation requests.
+ * </p>
+ * <p>
+ * This test only concerns itself with the equivalence of results
+ * </p>
+ * 
+ * @see TestCloudJSONFacetSKG
+ */
+public class TestCloudJSONFacetSKGEquiv extends SolrCloudTestCase {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private static final String DEBUG_LABEL = MethodHandles.lookup().lookupClass().getName();
+  private static final String COLLECTION_NAME = DEBUG_LABEL + "_collection";
+
+  private static final int DEFAULT_LIMIT = FacetField.DEFAULT_FACET_LIMIT;
+  private static final int MAX_FIELD_NUM = 15;
+  private static final int UNIQUE_FIELD_VALS = 50;
+
+  /** Multi-Valued string field suffixes that can be randomized for testing diff facet code paths */
+  private static final String[] MULTI_STR_FIELD_SUFFIXES = new String[]
+    { "_multi_ss", "_multi_sds", "_multi_sdsS" };
+  /** Multi-Valued int field suffixes that can be randomized for testing diff facet code paths */
+  private static final String[] MULTI_INT_FIELD_SUFFIXES = new String[]
+    { "_multi_is", "_multi_ids", "_multi_idsS" };
+
+  /** Single Valued string field suffixes that can be randomized for testing diff facet code paths */
+  private static final String[] SOLO_STR_FIELD_SUFFIXES = new String[]
+    { "_solo_s", "_solo_sd", "_solo_sdS" };
+  /** Single Valued int field suffixes that can be randomized for testing diff facet code paths */
+  private static final String[] SOLO_INT_FIELD_SUFFIXES = new String[]
+    { "_solo_i", "_solo_id", "_solo_idS" };
+
+  /** A basic client for operations at the cloud level, default collection will be set */
+  private static CloudSolrClient CLOUD_CLIENT;
+  /** One client per node */
+  private static final ArrayList<HttpSolrClient> CLIENTS = new ArrayList<>(5);
+
+  @BeforeClass
+  private static void createMiniSolrCloudCluster() throws Exception {
+    // sanity check constants
+    assertTrue("bad test constants: some suffixes will never be tested",
+               (MULTI_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) &&
+               (MULTI_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM) &&
+               (SOLO_STR_FIELD_SUFFIXES.length < MAX_FIELD_NUM) &&
+               (SOLO_INT_FIELD_SUFFIXES.length < MAX_FIELD_NUM));
+    
+    // we need DVs on point fields to compute stats & facets
+    if (Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)) System.setProperty(NUMERIC_DOCVALUES_SYSPROP,"true");
+    
+    // multi replicas should not matter...
+    final int repFactor = usually() ? 1 : 2;
+    // ... but we definitely want to test multiple shards
+    final int numShards = TestUtil.nextInt(random(), 1, (usually() ? 2 :3));
+    final int numNodes = (numShards * repFactor);
+   
+    final String configName = DEBUG_LABEL + "_config-set";
+    final Path configDir = Paths.get(TEST_HOME(), "collection1", "conf");
+    
+    configureCluster(numNodes).addConfig(configName, configDir).configure();
+    
+    Map<String, String> collectionProperties = new LinkedHashMap<>();
+    collectionProperties.put("config", "solrconfig-tlog.xml");
+    collectionProperties.put("schema", "schema_latest.xml");
+    CollectionAdminRequest.createCollection(COLLECTION_NAME, configName, numShards, repFactor)
+        .setProperties(collectionProperties)
+        .process(cluster.getSolrClient());
+
+    CLOUD_CLIENT = cluster.getSolrClient();
+    CLOUD_CLIENT.setDefaultCollection(COLLECTION_NAME);
+
+    waitForRecoveriesToFinish(CLOUD_CLIENT);
+
+    for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
+      CLIENTS.add(getHttpSolrClient(jetty.getBaseUrl() + "/" + COLLECTION_NAME + "/"));
+    }
+
+    final int numDocs = atLeast(100);
+    for (int id = 0; id < numDocs; id++) {
+      SolrInputDocument doc = sdoc("id", ""+id);
+
+      // NOTE: for each fieldNum, there are actually 4 fields: multi(str+int) + solo(str+int)
+      for (int fieldNum = 0; fieldNum < MAX_FIELD_NUM; fieldNum++) {
+        // NOTE: Some docs may not have any value in some fields
+        final int numValsThisDoc = TestUtil.nextInt(random(), 0, (usually() ? 5 : 10));
+        for (int v = 0; v < numValsThisDoc; v++) {
+          final String fieldValue = randFieldValue(fieldNum);
+          
+          // multi valued: one string, and one integer
+          doc.addField(multiStrField(fieldNum), fieldValue);
+          doc.addField(multiIntField(fieldNum), fieldValue);
+        }
+        if (3 <= numValsThisDoc) { // use num values in multivalue to inform sparseness of single value
+          final String fieldValue = randFieldValue(fieldNum);
+          doc.addField(soloStrField(fieldNum), fieldValue);
+          doc.addField(soloIntField(fieldNum), fieldValue);
+        }
+      }
+      CLOUD_CLIENT.add(doc);
+      if (random().nextInt(100) < 1) {
+        CLOUD_CLIENT.commit();  // commit 1% of the time to create new segments
+      }
+      if (random().nextInt(100) < 5) {
+        CLOUD_CLIENT.add(doc);  // duplicate the doc 5% of the time to create deleted docs
+      }
+    }
+    CLOUD_CLIENT.commit();
+
+    log.info("Created {} using numNodes={}, numShards={}, repFactor={}, numDocs={}",
+             COLLECTION_NAME, numNodes, numShards, repFactor, numDocs);
+  }
+
+  /**
+   * Given a (random) number, and a (static) array of possible suffixes, returns a consistent field name that
+   * uses that number and one of the specified suffixes in its name.
+   *
+   * @see #MULTI_STR_FIELD_SUFFIXES
+   * @see #MULTI_INT_FIELD_SUFFIXES
+   * @see #MAX_FIELD_NUM
+   * @see #randFieldValue
+   */
+  private static String field(final String[] suffixes, final int fieldNum) {
+    assert fieldNum < MAX_FIELD_NUM;
+    
+    final String suffix = suffixes[fieldNum % suffixes.length];
+    return "field_" + fieldNum + suffix;
+  }
+  /** Given a (random) number, returns a consistent field name for a multi valued string field */
+  private static String multiStrField(final int fieldNum) {
+    return field(MULTI_STR_FIELD_SUFFIXES, fieldNum);
+  }
+  /** Given a (random) number, returns a consistent field name for a multi valued int field */
+  private static String multiIntField(final int fieldNum) {
+    return field(MULTI_INT_FIELD_SUFFIXES, fieldNum);
+  }
+  /** Given a (random) number, returns a consistent field name for a single valued string field */
+  private static String soloStrField(final int fieldNum) {
+    return field(SOLO_STR_FIELD_SUFFIXES, fieldNum);
+  }
+  /** Given a (random) number, returns a consistent field name for a single valued int field */
+  private static String soloIntField(final int fieldNum) {
+    return field(SOLO_INT_FIELD_SUFFIXES, fieldNum);
+  }
+
+  /**
+   * Given a (random) field number, returns a random (integer based) value for that field.
+   * NOTE: The number of unique values in each field is constant according to {@link #UNIQUE_FIELD_VALS}
+   * but the precise <em>range</em> of values will vary for each unique field number, such that cross field joins 
+   * will match fewer documents based on how far apart the field numbers are.
+   *
+   * @see #UNIQUE_FIELD_VALS
+   * @see #field
+   */
+  private static String randFieldValue(final int fieldNum) {
+    return "" + (fieldNum + TestUtil.nextInt(random(), 1, UNIQUE_FIELD_VALS));
+  }
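+  // For illustration: with UNIQUE_FIELD_VALS=50, randFieldValue(...) above yields values in [8, 57]
+  // for fieldNum=7 but [1, 50] for fieldNum=0 -- overlapping yet shifted ranges, as described above.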
+
+  
+  @AfterClass
+  private static void afterClass() throws Exception {
+    if (null != CLOUD_CLIENT) {
+      CLOUD_CLIENT.close();
+      CLOUD_CLIENT = null;
+    }
+    for (HttpSolrClient client : CLIENTS) {
+      client.close();
+    }
+    CLIENTS.clear();
+  }
+  
+  /**
+   * Sanity check that our method of varying the <code>method</code> param
+   * works and can be verified by inspecting the debug output of basic requests.
+   */
+  public void testWhiteboxSanityMethodProcessorDebug() throws Exception {
+    // NOTE: json.facet debugging output can be wonky, particularly when dealing with cloud
+    // so for these queries we keep it simple:
+    // - only one "top" facet per request
+    // - no refinement
+    // even with those constraints in place, a single facet can (may/sometimes?) produce multiple debug
+    // blocks - apparently due to shard merging? So...
+    // - only inspect the "first" debug NamedList in the results
+    //
+    
+    // simple individual facet that sorts on an skg stat...
+    final TermFacet f = new TermFacet(soloStrField(9), 10, 0, "skg desc", null);
+    final Map<String,TermFacet> facets = new LinkedHashMap<>();
+    facets.put("str", f);
+    
+    final SolrParams facetParams = params("rows","0",
+                                          "debug","true", // SOLR-14451
+                                          // *:* is the only "safe" query for this test,
+                                          // to ensure we always have at least one bucket for every facet
+                                          // so we can be confident in getting the debug we expect...
+                                          "q", "*:*",
+                                          "fore", multiStrField(7)+":11",
+                                          "back", "*:*",
+                                          "json.facet", Facet.toJSONFacetParamValue(facets));
+    
+    { // dv 
+      final SolrParams params = SolrParams.wrapDefaults(params("method_val", "dv"),
+                                                        facetParams);
+      final NamedList<Object> debug = getFacetDebug(params);
+      assertEquals(FacetFieldProcessorByArrayDV.class.getSimpleName(), debug.get("processor"));
+    }
+    { // dvhash
+      final SolrParams params = SolrParams.wrapDefaults(params("method_val", "dvhash"),
+                                                        facetParams);
+      final NamedList<Object> debug = getFacetDebug(params);
+      assertEquals(FacetFieldProcessorByHashDV.class.getSimpleName(), debug.get("processor"));
+    }
+  }
+
+  /**
+   * returns the <b>FIRST</b> NamedList (under the implicit 'null' FacetQuery) in the "facet-trace" output 
+   * of the request.  Should not be used with multiple "top level" facets 
+   * (the output is too confusing in cloud mode to be confident where/why each NamedList comes from)
+   */
+  private NamedList<Object> getFacetDebug(final SolrParams params) {
+    try {
+      final QueryResponse rsp = (new QueryRequest(params)).process(getRandClient(random()));
+      assertNotNull(params + " is null rsp?", rsp);
+      final NamedList topNamedList = rsp.getResponse();
+      assertNotNull(params + " is null topNamedList?", topNamedList);
+      
+      // skip past the (implicit) top Facet query to get its "sub-facets" (the real facets)...
+      final List<NamedList<Object>> facetDebug =
+        (List<NamedList<Object>>) topNamedList.findRecursive("debug", "facet-trace", "sub-facet");
+      assertNotNull(topNamedList + " ... null facet debug?", facetDebug);
+      assertFalse(topNamedList + " ... not even one facet debug?", facetDebug.isEmpty());
+      return facetDebug.get(0);
+    } catch (Exception e) {
+      throw new RuntimeException("query failed: " + params + ": " + 
+                                 e.getMessage(), e);
+    } 
+
+  }
+  
+  /** 
+   * Test some small, hand crafted, but non-trivial queries that are
+   * easier to trace/debug than a pure random monstrosity.
+   * (ie: if something obvious gets broken, this test may fail faster and in a more obvious way than testRandom)
+   */
+  public void testBespoke() throws Exception {
+    { // two trivial single level facets
+      Map<String,TermFacet> facets = new LinkedHashMap<>();
+      facets.put("str", new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, null, null));
+      facets.put("int", new TermFacet(multiIntField(9), UNIQUE_FIELD_VALS, 0, null, null));
+      assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
+    }
+    
+    { // trivial single level facet w/sorting on skg and refinement explicitly disabled
+      Map<String,TermFacet> facets = new LinkedHashMap<>();
+      facets.put("xxx", new TermFacet(multiStrField(9), UNIQUE_FIELD_VALS, 0, "skg desc", false));
+      assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
+    }
+    
+    { // trivial single level facet w/ perSeg
+      Map<String,TermFacet> facets = new LinkedHashMap<>();
+      facets.put("xxx", new TermFacet(multiStrField(9),
+                                      map("perSeg", true)));
+      
+      assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
+    }
+    
+    { // trivial single level facet w/ prefix 
+      Map<String,TermFacet> facets = new LinkedHashMap<>();
+      facets.put("xxx", new TermFacet(multiStrField(9),
+                                      map("prefix", "2")));
+      
+      
+      assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
+    }
+    
+    { // trivial single level facet w/ 2 diff ways to request "limit = (effectively) Infinite"
+      // to sanity check refinement of buckets missing from other shard in both cases
+      
+      // NOTE that these two queries & facets *should* be effectively identical given that the
+      // very large limit value is big enough that no shard will ever return that many terms,
+      // but the "limit=-1" case actually triggers slightly different code paths
+      // because it causes FacetField.returnsPartial() to be "true"
+      for (int limit : new int[] { 999999999, -1 }) {
+        Map<String,TermFacet> facets = new LinkedHashMap<>();
+        facets.put("top_facet_limit__" + limit, new TermFacet(multiStrField(9), limit, 0, "skg desc", true));
+        assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
+      }
+    }
+    
+    { // multi-valued facet field w/infinite limit and an extra (non-SKG) stat
+      final TermFacet xxx = new TermFacet(multiStrField(12), -1, 0, "count asc", false);
+      xxx.subFacets.put("sum", new SumFacet(multiIntField(4)));
+      final Map<String,TermFacet> facets = new LinkedHashMap<>();
+      facets.put("xxx", xxx);
+      assertFacetSKGsAreConsistent(facets,
+                                   buildORQuery(multiStrField(13) + ":26",
+                                                multiStrField(6) + ":33",
+                                                multiStrField(9) + ":24"),
+                                   buildORQuery(multiStrField(4) + ":27",
+                                                multiStrField(12) + ":18",
+                                                multiStrField(2) + ":28",
+                                                multiStrField(13) + ":50"),
+                                   "*:*");
+    }
+  }
+  
+  /** 
+   * If/when we can re-enable this test, make sure to update {@link TermFacet#buildRandom} 
+   * and {@link #testBespokeStructures} to start doing randomized testing of <code>allBuckets</code>
+   */
+  @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-14467")
+  public void testBespokeAllBuckets() throws Exception {
+    { // single level facet w/sorting on skg and allBuckets
+      Map<String,TermFacet> facets = new LinkedHashMap<>();
+      facets.put("xxx", new TermFacet(multiStrField(9), map("sort", "skg desc",
+                                                            "allBuckets", true)));
+      
+      assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
+    }
+  }
+  
+  public void testBespokePrefix() throws Exception {
+    { // trivial single level facet w/ prefix 
+      Map<String,TermFacet> facets = new LinkedHashMap<>();
+      facets.put("xxx", new TermFacet(multiStrField(9),
+                                      map("sort", "skg desc",
+                                          "limit", -1,
+                                          "prefix", "2")));
+      
+      assertFacetSKGsAreConsistent(facets, multiStrField(7)+":11", multiStrField(5)+":9", "*:*");
+    }
+  }
+  
+  /** 
+   * Given a few explicit "structures" of requests, test many permutations of various params/options.
+   * This is more complex than {@link #testBespoke} but should still be easier to trace/debug than
+   * a pure random monstrosity.
+   */
+  public void testBespokeStructures() throws Exception {
+    // we don't need to test every field, just make sure we test enough fields to hit every suffix..
+    final int maxFacetFieldNum = Collections.max(Arrays.asList(MULTI_STR_FIELD_SUFFIXES.length,
+                                                               MULTI_INT_FIELD_SUFFIXES.length,
+                                                               SOLO_STR_FIELD_SUFFIXES.length,
+                                                               SOLO_INT_FIELD_SUFFIXES.length));
+    
+    for (int facetFieldNum = 0; facetFieldNum < maxFacetFieldNum; facetFieldNum++) {
+      for (String facetFieldName : Arrays.asList(soloStrField(facetFieldNum), multiStrField(facetFieldNum))) {
+        for (int limit : Arrays.asList(10, -1)) {
+          for (String sort : Arrays.asList("count desc", "skg desc", "index asc")) {
+            for (Boolean refine : Arrays.asList(false, true)) {
+              { // 1 additional (non-SKG) stat
+                final TermFacet xxx = new TermFacet(facetFieldName, map("limit", limit,
+                                                                        "overrequest", 0,
+                                                                        "sort", sort,
+                                                                        "refine", refine));
+                xxx.subFacets.put("sum", new SumFacet(soloIntField(3)));
+                final Map<String,TermFacet> facets = new LinkedHashMap<>();
+                facets.put("xxx1", xxx);
+                assertFacetSKGsAreConsistent(facets,
+                                             buildORQuery(multiStrField(11) + ":55",
+                                                          multiStrField(0) + ":46"),
+                                             multiStrField(5)+":9", "*:*");
+              }
+              { // multiple SKGs
+                final TermFacet xxx = new TermFacet(facetFieldName, map("limit", limit,
+                                                                        "overrequest", 0,
+                                                                        "sort", sort,
+                                                                        "refine", refine));
+                xxx.subFacets.put("skg2", new RelatednessFacet(multiStrField(2)+":9", "*:*"));
+                final Map<String,TermFacet> facets = new LinkedHashMap<>();
+                facets.put("xxx2", xxx);
+                assertFacetSKGsAreConsistent(facets,
+                                             buildORQuery(multiStrField(11) + ":55",
+                                                          multiStrField(0) + ":46"),
+                                             multiStrField(5)+":9", "*:*");
+              }
+              { // multiple SKGs and a multiple non-SKG stats
+                final TermFacet xxx = new TermFacet(facetFieldName, map("limit", limit,
+                                                                        "overrequest", 0,
+                                                                        "sort", sort,
+                                                                        "refine", refine));
+                xxx.subFacets.put("minAAA", new SumFacet(soloIntField(3)));
+                xxx.subFacets.put("skg2", new RelatednessFacet(multiStrField(2)+":9", "*:*"));
+                xxx.subFacets.put("minBBB", new SumFacet(soloIntField(2)));
+                final Map<String,TermFacet> facets = new LinkedHashMap<>();
+                facets.put("xxx3", xxx);
+                assertFacetSKGsAreConsistent(facets,
+                                             buildORQuery(multiStrField(11) + ":55",
+                                                          multiStrField(0) + ":46"),
+                                             multiStrField(5)+":9", "*:*");
+              }
+            }
+          }
+        }
+      }
+    }
+  }
+  
+  public void testRandom() throws Exception {
+
+    final int numIters = atLeast(10);
+    for (int iter = 0; iter < numIters; iter++) {
+      assertFacetSKGsAreConsistent(TermFacet.buildRandomFacets(),
+                                   buildRandomQuery(), buildRandomQuery(), buildRandomQuery());
+    }
+  }
+
+  /**
+   * Generates a random query string across the randomized fields/values in the index
+   *
+   * @see #randFieldValue
+   * @see #field
+   */
+  private static String buildRandomQuery() {
+    if (0 == TestUtil.nextInt(random(), 0,10)) {
+      return "*:*";
+    }
+    final int numClauses = TestUtil.nextInt(random(), 3, 10);
+    return buildRandomORQuery(numClauses);
+  }
+  /** The more clauses, the more docs it's likely to match */
+  private static String buildRandomORQuery(final int numClauses) {
+    final String[] clauses = new String[numClauses];
+    for (int c = 0; c < numClauses; c++) {
+      final int fieldNum = random().nextInt(MAX_FIELD_NUM);
+      // keep queries simple, just use str fields - not point of test
+      clauses[c] = multiStrField(fieldNum) + ":" + randFieldValue(fieldNum);
+    }
+    return buildORQuery(clauses);
+  }
+
+  private static String buildORQuery(String... clauses) {
+    assert 0 < clauses.length;
+    return "(" + String.join(" OR ", clauses) + ")";
+  }
+  
+  
+  /**
+   * Given a set of term facets, and top level query strings, asserts that 
+   * the results of these queries are identical even when varying the <code>method_val</code> param
+   */
+  private void assertFacetSKGsAreConsistent(final Map<String,TermFacet> facets,
+                                            final String query,
+                                            final String foreQ,
+                                            final String backQ) throws SolrServerException, IOException {
+    final SolrParams basicParams = params("rows","0",
+                                          "q", query, "fore", foreQ, "back", backQ,
+                                          "json.facet", Facet.toJSONFacetParamValue(facets));
+    
+    log.info("Doing full run: {}", basicParams);
+    try {
+
+      // start by recording the results of the purely "default" behavior...
+      final NamedList expected = getFacetResponse(basicParams);
+
+      // now loop over all processors and compare them to the "default"...
+      for (FacetMethod method : EnumSet.allOf(FacetMethod.class)) {
+        ModifiableSolrParams options = params("method_val", method.toString().toLowerCase(Locale.ROOT));
+          
+        final NamedList actual = getFacetResponse(SolrParams.wrapAppended(options, basicParams));
+
+        // we can't rely on a trivial assertEquals() comparison...
+        // 
+        // the order of the sub-facet keys can change between
+        // processors.  (notably: method:enum vs method:smart when sort:"index asc")
+        // 
+        // NOTE: this doesn't ignore the order of the buckets,
+        // it ignores the order of the keys in each bucket...
+        final String pathToMismatch = BaseDistributedSearchTestCase.compare
+          (expected, actual, 0,
+           Collections.singletonMap("buckets", BaseDistributedSearchTestCase.UNORDERED));
+        if (null != pathToMismatch) {
+          log.error("{}: expected = {}", options, expected);
+          log.error("{}: actual = {}", options, actual);
+          fail("Mismatch: " + pathToMismatch + " using " + options);
+        }
+      }
+    } catch (AssertionError e) {
+      throw new AssertionError(basicParams + " ===> " + e.getMessage(), e);
+    } finally {
+      log.info("Ending full run"); 
+    }
+  }
+
+  /**     
+   * We ignore {@link QueryResponse#getJsonFacetingResponse()} because it isn't as useful for
+   * doing a "deep equals" comparison across requests
+   */
+  private NamedList getFacetResponse(final SolrParams params) {
+    try {
+      final QueryResponse rsp = (new QueryRequest(params)).process(getRandClient(random()));
+      assertNotNull(params + " is null rsp?", rsp);
+      final NamedList topNamedList = rsp.getResponse();
+      assertNotNull(params + " is null topNamedList?", topNamedList);
+      final NamedList facetResponse = (NamedList) topNamedList.get("facets");
+      assertNotNull("null facet results?", facetResponse);
+      assertEquals("numFound mismatch with top count?",
+                   rsp.getResults().getNumFound(), ((Number)facetResponse.get("count")).longValue());
+      
+      return facetResponse;
+      
+    } catch (Exception e) {
+      throw new RuntimeException("query failed: " + params + ": " + 
+                                 e.getMessage(), e);
+    }
+  }
+
+  private static interface Facet { // Mainly just a Marker Interface
+    
+    /**
+     * Given a set of (possibly nested) facets, generates a suitable <code>json.facet</code> param value to 
+     * use for testing them against in a solr request.
+     */
+    public static String toJSONFacetParamValue(final Map<String,? extends Facet> facets) {
+      assert null != facets;
+      assert ! facets.isEmpty();
+
+      return JSONUtil.toJSON(facets, -1); // no newlines
+    }
+  }
+
+  /** 
+   * trivial facet that is not SKG (and doesn't have any of its special behavior) for the purposes
+   * of testing how TermFacet behaves with a mix of sub-facets.
+   */
+  private static final class SumFacet implements Facet {
+    private final String field;
+    public SumFacet(final String field) {
+      this.field = field;
+    }
+    @Override
+    public String toString() { // used in JSON by default
+      return "sum(" + field + ")";
+    }
+    public static SumFacet buildRandom() {
+      final int fieldNum = random().nextInt(MAX_FIELD_NUM);
+      final boolean multi = random().nextBoolean();
+      return new SumFacet(multi ? multiIntField(fieldNum) : soloIntField(fieldNum));
+    }
+  }
+  
+  /**
+   * Trivial data structure for modeling a simple <code>relatedness()</code> facet that can be written out as a json.facet param.
+   *
+   * Doesn't do any string escaping or quoting, so don't use whitespace or reserved json characters
+   *
+   * The specified fore/back queries will be wrapped in localparam syntax in the resulting json, 
+   * unless they are 'null' in which case <code>$fore</code> and <code>$back</code> refs will be used 
+   * in their place, and must be set as request params (this allows "random" facets to still easily 
+   * trigger the "nested facets re-using the same fore/back set for SKG situation)
+   */
+  private static final class RelatednessFacet implements Facet, Writable {
+    public final Map<String,Object> jsonData = new LinkedHashMap<>();
+    
+    /** Assumes null for fore/back queries w/no options */
+    public RelatednessFacet() {
+      this(null, null, map());
+    }
+    /** Assumes no options */
+    public RelatednessFacet(final String foreQ, final String backQ) {
+      this(foreQ, backQ, map());
+    }
+    public RelatednessFacet(final String foreQ, final String backQ,
+                            final Map<String,Object> options) {
+      assert null != options;
+      
+      final String f = null == foreQ ? "$fore" : "{!v='"+foreQ+"'}";
+      final String b = null == backQ ? "$back" : "{!v='"+backQ+"'}";
+
+      jsonData.putAll(options);
+      
+      // we don't allow these to be overridden by options, so set them now...
+      jsonData.put("type", "func");
+      jsonData.put("func", "relatedness("+f+","+b+")");
+      
+    }
+    @Override
+    public void write(JSONWriter writer) {
+      writer.write(jsonData);
+    }
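+    // Illustrative sketch (not part of the original test): new RelatednessFacet() writes
+    //   {"type":"func","func":"relatedness($fore,$back)"}
+    // while e.g. new RelatednessFacet("foo_s:1", "*:*") -- "foo_s:1" is just a hypothetical query --
+    // writes {"type":"func","func":"relatedness({!v='foo_s:1'},{!v='*:*'})"}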
+    
+    public static RelatednessFacet buildRandom() {
+
+      final Map<String,Object> options = new LinkedHashMap<>();
+      if (random().nextBoolean()) {
+        options.put("min_popularity", "0.001");
+      }
+      
+      // bias this in favor of null fore/back since that's most realistic for typical nested facets
+      final boolean simple = random().nextBoolean();
+      final String fore = simple ? null : buildRandomORQuery(TestUtil.nextInt(random(), 1, 5));
+      final String back = simple ? null : buildRandomORQuery(TestUtil.nextInt(random(), 1, 9));
+      
+      return new RelatednessFacet(fore, back, options);
+    }
+  }
+  
+  /**
+   * Trivial data structure for modeling a simple terms facet that can be written out as a json.facet param.
+   * Since the point of this test is SKG, every TermFacet implicitly has one fixed "skg" subFacet, but that 
+   * can be overridden by the caller
+   *
+   * Doesn't do any string escaping or quoting, so don't use whitespace or reserved json characters
+   *
+   * The resulting facets all specify a <code>method</code> of <code>${method_val:smart}</code> which may be 
+   * overridden via request params. 
+   */
+  private static final class TermFacet implements Facet, Writable {
+
+    public final Map<String,Object> jsonData = new LinkedHashMap<>();
+    public final Map<String,Facet> subFacets = new LinkedHashMap<>();
+
+    /** 
+     * @param field must be non null
+     * @param options can set any of the options used in a term facet other than field or (sub) facets
+     */
+    public TermFacet(final String field, final Map<String,Object> options) {
+      assert null != field;
+      
+      jsonData.put("method", "${method_val:smart}");
+      
+      jsonData.putAll(options);
+
+      // we don't allow these to be overridden by options, so set them now...
+      jsonData.put("type", "terms");
+      jsonData.put("field",field);
+      jsonData.put("facet", subFacets);
+      
+      subFacets.put("skg", new RelatednessFacet());
+    }
+
+    /** all params except field can be null */
+    public TermFacet(String field, Integer limit, Integer overrequest, String sort, Boolean refine) {
+      this(field, map("limit", limit, "overrequest", overrequest, "sort", sort, "refine", refine));
+    }
+    
+    @Override
+    public void write(JSONWriter writer) {
+      writer.write(jsonData);
+    }
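+    // Illustrative sketch (not part of the original test): new TermFacet(field, map("limit", 10))
+    // writes roughly (key order may vary):
+    //   {"method":"${method_val:smart}","limit":10,"type":"terms","field":<field>,
+    //    "facet":{"skg":{"type":"func","func":"relatedness($fore,$back)"}}}
+    // so the processor used can be swapped at request time via the "method_val" request param.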
+
+    /** 
+     * Generates a random TermFacet that does not contain any random sub-facets
+     * (beyond a single consistent "skg" stat)
+     */
+    public static TermFacet buildRandom() {
+      final String sort = randomSortParam(random());
+      final String facetField = randomFacetField(random());
+      return new TermFacet(facetField,
+                           map("limit", randomLimitParam(random()),
+                               "overrequest", randomOverrequestParam(random()),
+                               "prefix", randomPrefixParam(random(), facetField),
+                               "perSeg", randomPerSegParam(random()),
+                               "sort", sort,
+                               "prelim_sort", randomPrelimSortParam(random(), sort),
+                               // SOLR-14467 // "allBuckets", randomAllBucketsParam(random()),
+                               "refine", randomRefineParam(random())));
+    }
+    
+    /**
+     * Factory method for generating some random facets.  
+     *
+     * For simplicity, each facet will have a unique key name.
+     */
+    public static Map<String,TermFacet> buildRandomFacets() {
+      // for simplicity, use a unique facet key regardless of depth - simplifies verification
+      // and lets us enforce a hard limit on the total number of facets in a request
+      AtomicInteger keyCounter = new AtomicInteger(0);
+      
+      final int maxDepth = TestUtil.nextInt(random(), 0, (usually() ? 2 : 3));
+      return buildRandomFacets(keyCounter, maxDepth);
+    }
+    
+    /**
+     * picks a random field to facet on.
+     *
+     * @see #field
+     * @return field name, never null
+     */
+    public static String randomFacetField(final Random r) {
+      final int fieldNum = r.nextInt(MAX_FIELD_NUM);
+      switch(r.nextInt(4)) {
+        case 0: return multiStrField(fieldNum);
+        case 1: return multiIntField(fieldNum);
+        case 2: return soloStrField(fieldNum);
+        case 3: return soloIntField(fieldNum);
+        default: throw new RuntimeException("Broken case statement");
+      }
+    }
+  
+    /**
+     * picks a random value for the "allBuckets" param, biased in favor of interesting test cases
+     *
+     * @return a Boolean, may be null
+     * @see #testBespokeAllBuckets
+     */
+    public static Boolean randomAllBucketsParam(final Random r) {
+
+      switch(r.nextInt(4)) {
+        case 0: return true;
+        case 1: return false;
+        case 2: 
+        case 3: return null;
+        default: throw new RuntimeException("Broken case statement");
+      }
+    }
+
+    /**
+     * picks a random value for the "refine" param, biased in favor of interesting test cases
+     *
+     * @return a Boolean, may be null
+     */
+    public static Boolean randomRefineParam(final Random r) {
+
+      switch(r.nextInt(3)) {
+        case 0: return null;
+        case 1: return true;
+        case 2: return false;
+        default: throw new RuntimeException("Broken case statement");
+      }
+    }
+    
+    /**
+     * picks a random value for the "perSeg" param, biased in favor of interesting test cases
+     *
+     * @return a Boolean, may be null
+     */
+    public static Boolean randomPerSegParam(final Random r) {
+
+      switch(r.nextInt(4)) {
+        case 0: return true;
+        case 1: return false;
+        case 2: 
+        case 3: return null;
+        default: throw new RuntimeException("Broken case statement");
+      }
+    }
+    
+    /**
+     * picks a random value for the "prefix" param, biased in favor of interesting test cases
+     *
+     * @return a valid prefix value, may be null
+     */
+    public static String randomPrefixParam(final Random r, final String facetField) {
+      
+      if (facetField.contains("multi_i") || facetField.contains("solo_i")) {
+        // never use a prefix on a numeric field
+        return null;
+      }
+      assert (facetField.contains("multi_s") || facetField.contains("solo_s"))
+        : "possible facet fields have changed, breaking test";
+      
+      switch(r.nextInt(5)) {
+        case 0: return "2";
+        case 1: return "3";
+        case 2: 
+        case 3: 
+        case 4: return null;
+        default: throw new RuntimeException("Broken case statement");
+      }
+    }
+    
+    /**
+     * picks a random value for the "sort" param, biased in favor of interesting test cases.  
+     * Assumes every TermFacet will have at least one "skg" stat
+     *
+     * @return a sort string (w/direction), or null to specify nothing (trigger default behavior)
+     * @see #randomPrelimSortParam
+     */
+    public static String randomSortParam(final Random r) {
+
+      final String dir = random().nextBoolean() ? "asc" : "desc";
+      switch(r.nextInt(4)) {
+        case 0: return null;
+        case 1: return "count " + dir;
+        case 2: return "skg " + dir;
+        case 3: return "index " + dir;
+        default: throw new RuntimeException("Broken case statement");
+      }
+    }
+    /**
+     * picks a random value for the "prelim_sort" param, biased in favor of interesting test cases.  
+     *
+     * @return a sort string (w/direction), or null to specify nothing (trigger default behavior)
+     * @see #randomSortParam
+     */
+    public static String randomPrelimSortParam(final Random r, final String sort) {
+
+      if (null != sort && sort.startsWith("skg") && 1 == TestUtil.nextInt(random(), 0, 3)) {
+        return "count desc";
+      }
+      return null;
+    }
+    /**
+     * picks a random value for the "limit" param, biased in favor of interesting test cases
+     *
+     * @return a number to specify in the request, or null to specify nothing (trigger default behavior)
+     * @see #UNIQUE_FIELD_VALS
+     */
+    public static Integer randomLimitParam(final Random r) {
+
+      final int limit = 1 + r.nextInt((int) (UNIQUE_FIELD_VALS * 1.5F));
+      
+      if (1 == TestUtil.nextInt(random(), 0, 3)) {
+        // bias in favor of just using default
+        return null;
+      }
+      
+      if (limit >= UNIQUE_FIELD_VALS && r.nextBoolean()) {
+        return -1; // unlimited
+      }
+      
+      return limit;
+    }
+    
+    /**
+     * picks a random value for the "overrequest" param, biased in favor of interesting test cases.
+     *
+     * @return a number to specify in the request, or null to specify nothing (trigger default behavior)
+     * @see #UNIQUE_FIELD_VALS
+     */
+    public static Integer randomOverrequestParam(final Random r) {
+      switch(r.nextInt(10)) {
+        case 0:
+        case 1:
+        case 2:
+        case 3:
+          return 0; // 40% of the time, disable overrequest to better stress refinement
+        case 4:
+        case 5:
+          return r.nextInt(UNIQUE_FIELD_VALS); // 20% of the time, ask for less than what's needed
+        case 6:
+          return r.nextInt(Integer.MAX_VALUE); // 10%: completely random value, statistically more than enough
+        default: break;
+      }
+      // else.... either leave param unspecified (or redundently specify the -1 default)
+      return r.nextBoolean() ? null : -1;
+    }
+
+    /** 
+     * recursive helper method for building random facets
+     *
+     * @param keyCounter used to ensure every generated facet has a unique key name
+     * @param maxDepth max possible depth allowed for the recursion, a lower value may be used depending on how many facets are returned at the current level.
+     */
+    private static Map<String,TermFacet> buildRandomFacets(AtomicInteger keyCounter, int maxDepth) {
+      final int numFacets = Math.max(1, TestUtil.nextInt(random(), -1, 3)); // 3/5th chance of being '1'
+      Map<String,TermFacet> results = new LinkedHashMap<>();
+      for (int i = 0; i < numFacets; i++) {
+        if (keyCounter.get() < 3) { // a hard limit on the total number of facets (regardless of depth) to reduce OOM risk
+
+          final TermFacet facet = TermFacet.buildRandom();
+          
+          results.put("facet_" + keyCounter.incrementAndGet(), facet);
+          if (0 < maxDepth) {
+            // if we're going wide, don't go deep
+            final int nextMaxDepth = Math.max(0, maxDepth - numFacets);
+            facet.subFacets.putAll(buildRandomFacets(keyCounter, TestUtil.nextInt(random(), 0, nextMaxDepth)));
+          }
+          
+          // we get one implicit RelatednessFacet automatically,
+          // randomly add 1 or 2 more ... 3/5th chance of being '0'
+          final int numExtraSKGStats = Math.max(0, TestUtil.nextInt(random(), -2, 2)); 
+          for (int skgId = 0; skgId < numExtraSKGStats; skgId++) {
+            // sometimes we overwrite the trivial default "skg" with this one...
+            final String key = (0 == skgId && 0 == TestUtil.nextInt(random(), 0, 5)) ? "skg" : "skg" + skgId;
+            facet.subFacets.put(key, RelatednessFacet.buildRandom());
+          }
+
+          if (1 == TestUtil.nextInt(random(), 0, 4)) {
+            // occasionally add in a non-SKG related stat...
+            facet.subFacets.put("sum", SumFacet.buildRandom());
+          }
+        }
+      }
+      return results;
+    }
+  }
+
+  /** 
+   * returns a random SolrClient -- either a CloudSolrClient, or an HttpSolrClient pointed 
+   * at a node in our cluster 
+   */
+  public static SolrClient getRandClient(Random rand) {
+    int numClients = CLIENTS.size();
+    int idx = TestUtil.nextInt(rand, 0, numClients);
+
+    return (idx == numClients) ? CLOUD_CLIENT : CLIENTS.get(idx);
+  }
+
+  /**
+   * Uses a random SolrClient to execute a request and returns only the numFound
+   * @see #getRandClient
+   */
+  public static long getNumFound(final SolrParams req) throws SolrServerException, IOException {
+    return getRandClient(random()).query(req).getResults().getNumFound();
+  }
+  
+  public static void waitForRecoveriesToFinish(CloudSolrClient client) throws Exception {
+    assert null != client.getDefaultCollection();
+    AbstractDistribZkTestBase.waitForRecoveriesToFinish(client.getDefaultCollection(),
+                                                        client.getZkStateReader(),
+                                                        true, true, 330);
+  }
+
+  /** helper macro: fails on null keys, skips pairs with null values  */
+  public static Map<String,Object> map(Object... pairs) {
+    if (0 != pairs.length % 2) throw new IllegalArgumentException("uneven number of arguments");
+    final Map<String,Object> map = new LinkedHashMap<>();
+    for (int i = 0; i < pairs.length; i+=2) {
+      final Object key = pairs[i];
+      final Object val = pairs[i+1];
+      if (null == key) throw new NullPointerException("argument " + i);
+      if (null == val) continue;
+      
+      map.put(key.toString(), val);
+    }
+    return map;
+  }
+}
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index c3d31ff..44634c7 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -239,6 +239,39 @@ public class TestJsonFacets extends SolrTestCaseHS {
     client.commit();
   }
 
+  public void testMultiValuedBucketReHashing() throws Exception {
+    Client client = Client.localClient();
+    client.deleteByQuery("*:*", null);
+    // we want a domain with a small number of documents, and more facet (point) values then docs so
+    // that we force dvhash to increase the number of slots via resize...
+    // (NOTE: normal resizing won't happen w/o at least 1024 slots, but test static overrides this to '2')
+    client.add(sdoc("id", "1",
+                    "f_sd", "qqq",
+                    "f_ids", "4", "f_ids", "2", "f_ids", "999",
+                    "x_ids", "3", "x_ids", "5", "x_ids", "7",
+                    "z_ids", "42"), null);
+    client.add(sdoc("id", "2",
+                    "f_sd", "nnn",
+                    "f_ids", "44", "f_ids", "22", "f_ids", "999",
+                    "x_ids", "33", "x_ids", "55", "x_ids", "77",
+                    "z_ids", "666"), null);
+    client.add(sdoc("id", "3",
+                    "f_sd", "ggg",
+                    "f_ids", "444", "f_ids", "222", "f_ids", "999",
+                    "x_ids", "333", "x_ids", "555", "x_ids", "777",
+                    "z_ids", "1010101"), null);
+    client.commit();
+
+    // faceting on a multivalued point field sorting on a stat...
+    assertJQ(req("rows", "0", "q", "id:[1 TO 2]", "json.facet"
+                 , "{ f : { type: terms, field: f_ids, limit: 1, sort: 'x desc', "
+                 + "        facet: { x : 'sum(x_ids)', z : 'min(z_ids)' } } }")
+             , "response/numFound==2"
+             , "facets/count==2"
+             , "facets/f=={buckets:[{ val:999, count:2, x:180.0, z:42 }]}"
+             );
+  }
+
   public void testBehaviorEquivilenceOfUninvertibleFalse() throws Exception {
     Client client = Client.localClient();
     indexSimple(client);
@@ -407,31 +440,53 @@ public class TestJsonFacets extends SolrTestCaseHS {
       // So all of these re/sort options should produce identical output (since the num buckets is < limit)
       // - Testing "index" sort allows the randomized use of "stream" processor as default to be tested.
       // - Testing (re)sorts on other stats sanity checks code paths where relatedness() is a "defered" Agg
-      assertJQ(req("q", "cat_s:[* TO *]", "rows", "0",
-                   "fore", "where_s:NY", "back", "*:*",
-                   "json.facet", ""
-                   + "{x: { type: terms, field: 'cat_s', "+sort+", "
-                   + "      facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }")
-               , "facets=={count:5, x:{ buckets:["
-               + "   { val:'A', count:2, y:5.0, z:2, "
-               + "     skg : { relatedness: 0.00554, "
-               //+ "             foreground_count: 1, "
-               //+ "             foreground_size: 2, "
-               //+ "             background_count: 2, "
-               //+ "             background_size: 6,"
-               + "             foreground_popularity: 0.16667,"
-               + "             background_popularity: 0.33333, },"
-               + "   }, "
-               + "   { val:'B', count:3, y:-3.0, z:-5, "
-               + "     skg : { relatedness: 0.0, " // perfectly average and uncorrolated
-               //+ "             foreground_count: 1, "
-               //+ "             foreground_size: 2, "
-               //+ "             background_count: 3, "
-               //+ "             background_size: 6,"
-               + "             foreground_popularity: 0.16667,"
-               + "             background_popularity: 0.5 },"
-               + "   } ] } } "
-               );
+      for (String limit : Arrays.asList(", ", ", limit:5, ", ", limit:-1, ")) {
+        // results shouldn't change regardless of our limit param
+        assertJQ(req("q", "cat_s:[* TO *]", "rows", "0",
+                     "fore", "where_s:NY", "back", "*:*",
+                     "json.facet", ""
+                     + "{x: { type: terms, field: 'cat_s', "+sort + limit
+                     + "      facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }")
+                 , "facets=={count:5, x:{ buckets:["
+                 + "   { val:'A', count:2, y:5.0, z:2, "
+                 + "     skg : { relatedness: 0.00554, "
+                 //+ "             foreground_count: 1, "
+                 //+ "             foreground_size: 2, "
+                 //+ "             background_count: 2, "
+                 //+ "             background_size: 6,"
+                 + "             foreground_popularity: 0.16667,"
+                 + "             background_popularity: 0.33333, },"
+                 + "   }, "
+                 + "   { val:'B', count:3, y:-3.0, z:-5, "
+                 + "     skg : { relatedness: 0.0, " // perfectly average and uncorrelated
+                 //+ "             foreground_count: 1, "
+                 //+ "             foreground_size: 2, "
+                 //+ "             background_count: 3, "
+                 //+ "             background_size: 6,"
+                 + "             foreground_popularity: 0.16667,"
+                 + "             background_popularity: 0.5 },"
+                 + "   } ] } } "
+                 );
+        // same query with a prefix of 'B' should produce only a single bucket with exact same results
+        assertJQ(req("q", "cat_s:[* TO *]", "rows", "0",
+                     "fore", "where_s:NY", "back", "*:*",
+                     "json.facet", ""
+                     + "{x: { type: terms, field: 'cat_s', prefix:'B', "+sort + limit
+                     + "      facet: { skg: 'relatedness($fore,$back)', y:'sum(num_i)', z:'min(num_i)' } } }")
+                 , "facets=={count:5, x:{ buckets:["
+                 + "   { val:'B', count:3, y:-3.0, z:-5, "
+                 + "     skg : { relatedness: 0.0, " // perfectly average and uncorrelated
+                 //+ "             foreground_count: 1, "
+                 //+ "             foreground_size: 2, "
+                 //+ "             background_count: 3, "
+                 //+ "             background_size: 6,"
+                 + "             foreground_popularity: 0.16667,"
+                 + "             background_popularity: 0.5 },"
+                 + "   } ] } } "
+                 );
+
+        
+      }
     }
     
     // trivial sanity check that we can (re)sort on SKG after pre-sorting on count...


[lucene-solr] 43/47: SOLR-14491: Intercepting internode requests in KerberosPlugin when HTTP/2 client is used

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 270bdc16a39bb246bbec020365779c1f669a2ea2
Author: Ishan Chattopadhyaya <is...@apache.org>
AuthorDate: Mon Jun 1 13:37:12 2020 +0530

    SOLR-14491: Intercepting internode requests in KerberosPlugin when HTTP/2 client is used
---
 solr/CHANGES.txt                                   |  2 ++
 .../org/apache/solr/security/KerberosPlugin.java   | 27 ++++++++++++++++++++++
 2 files changed, 29 insertions(+)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index ca3a950..1d13f76 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -200,6 +200,8 @@ Bug Fixes
 
 * SOLR-14498: Upgrade to Caffeine 2.8.4, which fixes the cache poisoning issue. (Jakub Zytka, ab)
 
+* SOLR-14491: Intercepting internode requests in KerberosPlugin when HTTP/2 client is used (Ishan Chattopadhyaya, Moshe Bla)
+
 Other Changes
 ---------------------
 * SOLR-14197: SolrResourceLoader: marked many methods as deprecated, and in some cases rerouted exiting logic to avoid
diff --git a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
index 9a8bda4..9fd9397 100644
--- a/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/KerberosPlugin.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthentica
 import org.apache.http.HttpRequest;
 import org.apache.http.protocol.HttpContext;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
+import org.apache.solr.client.solrj.impl.HttpListenerFactory;
 import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder;
 import org.apache.solr.client.solrj.impl.SolrHttpClientBuilder;
 import org.apache.solr.cloud.ZkController;
@@ -45,6 +46,7 @@ import org.apache.solr.common.cloud.SecurityAwareZkACLProvider;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.request.SolrRequestInfo;
 import org.apache.solr.servlet.SolrDispatchFilter;
+import org.eclipse.jetty.client.api.Request;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -96,6 +98,7 @@ public class KerberosPlugin extends AuthenticationPlugin implements HttpClientBu
     }
   }
 
+
   @VisibleForTesting
   protected FilterConfig getInitFilterConfig(Map<String, Object> pluginConfig, boolean skipKerberosChecking) {
     Map<String, String> params = new HashMap();
@@ -259,12 +262,36 @@ public class KerberosPlugin extends AuthenticationPlugin implements HttpClientBu
   }
 
   @Override
+  protected boolean interceptInternodeRequest(Request request) {
+    SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
+    if (info != null && (info.getAction() == SolrDispatchFilter.Action.FORWARD ||
+        info.getAction() == SolrDispatchFilter.Action.REMOTEQUERY)) {
+      if (info.getUserPrincipal() != null) {
+        if (log.isInfoEnabled()) {
+          log.info("Setting original user principal: {}", info.getUserPrincipal().getName());
+        }
+        request.header(ORIGINAL_USER_PRINCIPAL_HEADER, info.getUserPrincipal().getName());
+        return true;
+      }
+    }
+    return false;
+  }
+
+  @Override
   public SolrHttpClientBuilder getHttpClientBuilder(SolrHttpClientBuilder builder) {
     return kerberosBuilder.getBuilder(builder);
   }
 
   @Override
   public void setup(Http2SolrClient client) {
+    final HttpListenerFactory.RequestResponseListener listener = new HttpListenerFactory.RequestResponseListener() {
+      @Override
+      public void onQueued(Request request) {
+        interceptInternodeRequest(request);
+      }
+    };
+    client.addListenerFactory(() -> listener);
+
     kerberosBuilder.setup(client);
   }
 


[lucene-solr] 39/47: LUCENE-9359: Always call checkFooter in SegmentInfos#readCommit. (#1483)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 2a224cb08cca9b2ea8086741552c81adc9ceb7b2
Author: Adrien Grand <jp...@gmail.com>
AuthorDate: Fri May 29 14:59:36 2020 +0200

    LUCENE-9359: Always call checkFooter in SegmentInfos#readCommit. (#1483)
---
 lucene/CHANGES.txt                                 |   3 +
 .../java/org/apache/lucene/index/SegmentInfos.java | 236 +++++++++++----------
 .../org/apache/lucene/index/TestSegmentInfos.java  |  61 ++++++
 3 files changed, 187 insertions(+), 113 deletions(-)

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index cd42f6e..50b7f7b 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -195,6 +195,9 @@ Improvements
 * LUCENE-9342: TotalHits' relation will be EQUAL_TO when the number of hits is lower than TopDocsCollector's numHits
   (Tomás Fernández Löbbe)
 
+* LUCENE-9359: SegmentInfos#readCommit now always returns a
+  CorruptIndexException if the content of the file is invalid. (Adrien Grand)
+
 Optimizations
 ---------------------
 
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java b/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
index f9edccd..dc379ab 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
@@ -304,136 +304,146 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
 
   /** Read the commit from the provided {@link ChecksumIndexInput}. */
   public static final SegmentInfos readCommit(Directory directory, ChecksumIndexInput input, long generation) throws IOException {
+    Throwable priorE = null;
+    int format = -1;
+    try {
+      // NOTE: as long as we want to throw indexformattooold (vs corruptindexexception), we need
+      // to read the magic ourselves.
+      int magic = input.readInt();
+      if (magic != CodecUtil.CODEC_MAGIC) {
+        throw new IndexFormatTooOldException(input, magic, CodecUtil.CODEC_MAGIC, CodecUtil.CODEC_MAGIC);
+      }
+      format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_70, VERSION_CURRENT);
+      byte id[] = new byte[StringHelper.ID_LENGTH];
+      input.readBytes(id, 0, id.length);
+      CodecUtil.checkIndexHeaderSuffix(input, Long.toString(generation, Character.MAX_RADIX));
+
+      Version luceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
+      int indexCreatedVersion = input.readVInt();
+      if (luceneVersion.major < indexCreatedVersion) {
+        throw new CorruptIndexException("Creation version [" + indexCreatedVersion
+            + ".x] can't be greater than the version that wrote the segment infos: [" + luceneVersion + "]" , input);
+      }
 
-    // NOTE: as long as we want to throw indexformattooold (vs corruptindexexception), we need
-    // to read the magic ourselves.
-    int magic = input.readInt();
-    if (magic != CodecUtil.CODEC_MAGIC) {
-      throw new IndexFormatTooOldException(input, magic, CodecUtil.CODEC_MAGIC, CodecUtil.CODEC_MAGIC);
-    }
-    int format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_70, VERSION_CURRENT);
-    byte id[] = new byte[StringHelper.ID_LENGTH];
-    input.readBytes(id, 0, id.length);
-    CodecUtil.checkIndexHeaderSuffix(input, Long.toString(generation, Character.MAX_RADIX));
-
-    Version luceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
-    int indexCreatedVersion = input.readVInt();
-    if (luceneVersion.major < indexCreatedVersion) {
-      throw new CorruptIndexException("Creation version [" + indexCreatedVersion
-          + ".x] can't be greater than the version that wrote the segment infos: [" + luceneVersion + "]" , input);
-    }
-
-    if (indexCreatedVersion < Version.LATEST.major - 1) {
-      throw new IndexFormatTooOldException(input, "This index was initially created with Lucene "
-          + indexCreatedVersion + ".x while the current version is " + Version.LATEST
-          + " and Lucene only supports reading the current and previous major versions.");
-    }
-
-    SegmentInfos infos = new SegmentInfos(indexCreatedVersion);
-    infos.id = id;
-    infos.generation = generation;
-    infos.lastGeneration = generation;
-    infos.luceneVersion = luceneVersion;
-
-    infos.version = input.readLong();
-    //System.out.println("READ sis version=" + infos.version);
-    if (format > VERSION_70) {
-      infos.counter = input.readVLong();
-    } else {
-      infos.counter = input.readInt();
-    }
-    int numSegments = input.readInt();
-    if (numSegments < 0) {
-      throw new CorruptIndexException("invalid segment count: " + numSegments, input);
-    }
+      if (indexCreatedVersion < Version.LATEST.major - 1) {
+        throw new IndexFormatTooOldException(input, "This index was initially created with Lucene "
+            + indexCreatedVersion + ".x while the current version is " + Version.LATEST
+            + " and Lucene only supports reading the current and previous major versions.");
+      }
 
-    if (numSegments > 0) {
-      infos.minSegmentLuceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
-    } else {
-      // else leave as null: no segments
-    }
+      SegmentInfos infos = new SegmentInfos(indexCreatedVersion);
+      infos.id = id;
+      infos.generation = generation;
+      infos.lastGeneration = generation;
+      infos.luceneVersion = luceneVersion;
 
-    long totalDocs = 0;
-    for (int seg = 0; seg < numSegments; seg++) {
-      String segName = input.readString();
-      byte[] segmentID = new byte[StringHelper.ID_LENGTH];
-      input.readBytes(segmentID, 0, segmentID.length);
-      Codec codec = readCodec(input);
-      SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ);
-      info.setCodec(codec);
-      totalDocs += info.maxDoc();
-      long delGen = input.readLong();
-      int delCount = input.readInt();
-      if (delCount < 0 || delCount > info.maxDoc()) {
-        throw new CorruptIndexException("invalid deletion count: " + delCount + " vs maxDoc=" + info.maxDoc(), input);
-      }
-      long fieldInfosGen = input.readLong();
-      long dvGen = input.readLong();
-      int softDelCount = format > VERSION_72 ? input.readInt() : 0;
-      if (softDelCount < 0 || softDelCount > info.maxDoc()) {
-        throw new CorruptIndexException("invalid deletion count: " + softDelCount + " vs maxDoc=" + info.maxDoc(), input);
+      infos.version = input.readLong();
+      //System.out.println("READ sis version=" + infos.version);
+      if (format > VERSION_70) {
+        infos.counter = input.readVLong();
+      } else {
+        infos.counter = input.readInt();
       }
-      if (softDelCount + delCount > info.maxDoc()) {
-        throw new CorruptIndexException("invalid deletion count: " + softDelCount + delCount + " vs maxDoc=" + info.maxDoc(), input);
+      int numSegments = input.readInt();
+      if (numSegments < 0) {
+        throw new CorruptIndexException("invalid segment count: " + numSegments, input);
       }
-      final byte[] sciId;
-      if (format > VERSION_74) {
-        byte marker = input.readByte();
-        switch (marker) {
-          case 1:
-            sciId = new byte[StringHelper.ID_LENGTH];
-            input.readBytes(sciId, 0, sciId.length);
-            break;
-          case 0:
-            sciId = null;
-            break;
-          default:
-            throw new CorruptIndexException("invalid SegmentCommitInfo ID marker: " + marker, input);
-        }
+
+      if (numSegments > 0) {
+        infos.minSegmentLuceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
       } else {
-        sciId = null;
+        // else leave as null: no segments
       }
-      SegmentCommitInfo siPerCommit = new SegmentCommitInfo(info, delCount, softDelCount, delGen, fieldInfosGen, dvGen, sciId);
-      siPerCommit.setFieldInfosFiles(input.readSetOfStrings());
-      final Map<Integer,Set<String>> dvUpdateFiles;
-      final int numDVFields = input.readInt();
-      if (numDVFields == 0) {
-        dvUpdateFiles = Collections.emptyMap();
-      } else {
-        Map<Integer,Set<String>> map = new HashMap<>(numDVFields);
-        for (int i = 0; i < numDVFields; i++) {
-          map.put(input.readInt(), input.readSetOfStrings());
+
+      long totalDocs = 0;
+      for (int seg = 0; seg < numSegments; seg++) {
+        String segName = input.readString();
+        byte[] segmentID = new byte[StringHelper.ID_LENGTH];
+        input.readBytes(segmentID, 0, segmentID.length);
+        Codec codec = readCodec(input);
+        SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ);
+        info.setCodec(codec);
+        totalDocs += info.maxDoc();
+        long delGen = input.readLong();
+        int delCount = input.readInt();
+        if (delCount < 0 || delCount > info.maxDoc()) {
+          throw new CorruptIndexException("invalid deletion count: " + delCount + " vs maxDoc=" + info.maxDoc(), input);
         }
-        dvUpdateFiles = Collections.unmodifiableMap(map);
-      }
-      siPerCommit.setDocValuesUpdatesFiles(dvUpdateFiles);
-      infos.add(siPerCommit);
+        long fieldInfosGen = input.readLong();
+        long dvGen = input.readLong();
+        int softDelCount = format > VERSION_72 ? input.readInt() : 0;
+        if (softDelCount < 0 || softDelCount > info.maxDoc()) {
+          throw new CorruptIndexException("invalid deletion count: " + softDelCount + " vs maxDoc=" + info.maxDoc(), input);
+        }
+        if (softDelCount + delCount > info.maxDoc()) {
+          throw new CorruptIndexException("invalid deletion count: " + softDelCount + delCount + " vs maxDoc=" + info.maxDoc(), input);
+        }
+        final byte[] sciId;
+        if (format > VERSION_74) {
+          byte marker = input.readByte();
+          switch (marker) {
+            case 1:
+              sciId = new byte[StringHelper.ID_LENGTH];
+              input.readBytes(sciId, 0, sciId.length);
+              break;
+            case 0:
+              sciId = null;
+              break;
+            default:
+              throw new CorruptIndexException("invalid SegmentCommitInfo ID marker: " + marker, input);
+          }
+        } else {
+          sciId = null;
+        }
+        SegmentCommitInfo siPerCommit = new SegmentCommitInfo(info, delCount, softDelCount, delGen, fieldInfosGen, dvGen, sciId);
+        siPerCommit.setFieldInfosFiles(input.readSetOfStrings());
+        final Map<Integer,Set<String>> dvUpdateFiles;
+        final int numDVFields = input.readInt();
+        if (numDVFields == 0) {
+          dvUpdateFiles = Collections.emptyMap();
+        } else {
+          Map<Integer,Set<String>> map = new HashMap<>(numDVFields);
+          for (int i = 0; i < numDVFields; i++) {
+            map.put(input.readInt(), input.readSetOfStrings());
+          }
+          dvUpdateFiles = Collections.unmodifiableMap(map);
+        }
+        siPerCommit.setDocValuesUpdatesFiles(dvUpdateFiles);
+        infos.add(siPerCommit);
 
-      Version segmentVersion = info.getVersion();
+        Version segmentVersion = info.getVersion();
 
-      if (segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) {
-        throw new CorruptIndexException("segments file recorded minSegmentLuceneVersion=" + infos.minSegmentLuceneVersion + " but segment=" + info + " has older version=" + segmentVersion, input);
-      }
+        if (segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) {
+          throw new CorruptIndexException("segments file recorded minSegmentLuceneVersion=" + infos.minSegmentLuceneVersion + " but segment=" + info + " has older version=" + segmentVersion, input);
+        }
 
-      if (infos.indexCreatedVersionMajor >= 7 && segmentVersion.major < infos.indexCreatedVersionMajor) {
-        throw new CorruptIndexException("segments file recorded indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor + " but segment=" + info + " has older version=" + segmentVersion, input);
-      }
+        if (infos.indexCreatedVersionMajor >= 7 && segmentVersion.major < infos.indexCreatedVersionMajor) {
+          throw new CorruptIndexException("segments file recorded indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor + " but segment=" + info + " has older version=" + segmentVersion, input);
+        }
 
-      if (infos.indexCreatedVersionMajor >= 7 && info.getMinVersion() == null) {
-        throw new CorruptIndexException("segments infos must record minVersion with indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor, input);
+        if (infos.indexCreatedVersionMajor >= 7 && info.getMinVersion() == null) {
+          throw new CorruptIndexException("segments infos must record minVersion with indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor, input);
+        }
       }
-    }
 
-    infos.userData = input.readMapOfStrings();
+      infos.userData = input.readMapOfStrings();
 
-    CodecUtil.checkFooter(input);
+      // LUCENE-6299: check we are in bounds
+      if (totalDocs > IndexWriter.getActualMaxDocs()) {
+        throw new CorruptIndexException("Too many documents: an index cannot exceed " + IndexWriter.getActualMaxDocs() + " but readers have total maxDoc=" + totalDocs, input);
+      }
 
-    // LUCENE-6299: check we are in bounds
-    if (totalDocs > IndexWriter.getActualMaxDocs()) {
-      throw new CorruptIndexException("Too many documents: an index cannot exceed " + IndexWriter.getActualMaxDocs() + " but readers have total maxDoc=" + totalDocs, input);
+      return infos;
+    } catch (Throwable t) {
+      priorE = t;
+    } finally {
+      if (format >= VERSION_70) { // oldest supported version
+        CodecUtil.checkFooter(input, priorE);
+      } else {
+        throw IOUtils.rethrowAlways(priorE);
+      }
     }
-
-    return infos;
+    throw new Error("Unreachable code");
   }
 
   private static Codec readCodec(DataInput input) throws IOException {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
index 19d8214..23c98ad 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
@@ -18,12 +18,16 @@ package org.apache.lucene.index;
 
 
 import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.StringHelper;
+import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;
 
 import java.io.IOException;
@@ -178,5 +182,62 @@ public class TestSegmentInfos extends LuceneTestCase {
       assertEquals("clone changed but shouldn't", StringHelper.idToString(id), StringHelper.idToString(clone.getId()));
     }
   }
+
+  public void testBitFlippedTriggersCorruptIndexException() throws IOException {
+    BaseDirectoryWrapper dir = newDirectory();
+    dir.setCheckIndexOnClose(false);
+    byte id[] = StringHelper.randomId();
+    Codec codec = Codec.getDefault();
+
+    SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
+    SegmentInfo info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_0", 1, false, Codec.getDefault(),
+                                       Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
+    info.setFiles(Collections.<String>emptySet());
+    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
+    SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, 0, -1, -1, -1, StringHelper.randomId());
+    sis.add(commitInfo);
+
+    info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_1", 1, false, Codec.getDefault(),
+                           Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
+    info.setFiles(Collections.<String>emptySet());
+    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
+    commitInfo = new SegmentCommitInfo(info, 0, 0,-1, -1, -1, StringHelper.randomId());
+    sis.add(commitInfo);
+
+    sis.commit(dir);
+
+    BaseDirectoryWrapper corruptDir = newDirectory();
+    corruptDir.setCheckIndexOnClose(false);
+    boolean corrupt = false;
+    for (String file : dir.listAll()) {
+      if (file.startsWith(IndexFileNames.SEGMENTS)) {
+        try (IndexInput in = dir.openInput(file, IOContext.DEFAULT);
+            IndexOutput out = corruptDir.createOutput(file, IOContext.DEFAULT)) {
+          final long corruptIndex = TestUtil.nextLong(random(), 0, in.length() - 1);
+          out.copyBytes(in, corruptIndex);
+          final int b = Byte.toUnsignedInt(in.readByte()) + TestUtil.nextInt(random(), 0x01, 0xff);
+          out.writeByte((byte) b);
+          out.copyBytes(in, in.length() - in.getFilePointer());
+        }
+        try (IndexInput in = corruptDir.openInput(file, IOContext.DEFAULT)) {
+          CodecUtil.checksumEntireFile(in);
+          if (VERBOSE) {
+            System.out.println("TEST: Altering the file did not update the checksum, aborting...");
+          }
+          return;
+        } catch (CorruptIndexException e) {
+          // ok
+        }
+        corrupt = true;
+      } else if (slowFileExists(corruptDir, file) == false) { // extraFS
+        corruptDir.copyFrom(dir, file, file, IOContext.DEFAULT);
+      }
+    }
+    assertTrue("No segments file found", corrupt);
+
+    expectThrows(CorruptIndexException.class, () -> SegmentInfos.readLatestCommit(corruptDir));
+    dir.close();
+    corruptDir.close();
+  }
 }
 

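The core of the LUCENE-9359 change above is a common CodecUtil idiom: remember any parse
failure, always verify the file footer, and let the checksum decide the final exception.
A minimal sketch of that pattern follows (illustrative only, not code from this commit;
the Payload type and parseBody method are hypothetical stand-ins for the segments-file
decoding):

    import java.io.IOException;

    import org.apache.lucene.codecs.CodecUtil;
    import org.apache.lucene.store.ChecksumIndexInput;

    /** Minimal sketch of the parse-then-checkFooter pattern used above. */
    final class FooterCheckedRead {

      /** Hypothetical payload type standing in for SegmentInfos. */
      static final class Payload {}

      /** Hypothetical body parser standing in for the segments-file decoding. */
      static Payload parseBody(ChecksumIndexInput input) throws IOException {
        return new Payload();
      }

      static Payload read(ChecksumIndexInput input) throws IOException {
        Throwable priorE = null;
        try {
          return parseBody(input);
        } catch (Throwable t) {
          // Remember the failure instead of throwing immediately...
          priorE = t;
        } finally {
          // ...so the footer/checksum is always verified; corruption surfaces as a
          // CorruptIndexException even when the body happened to parse without error.
          CodecUtil.checkFooter(input, priorE);
        }
        throw new AssertionError("unreachable: checkFooter throws when priorE != null");
      }
    }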

[lucene-solr] 19/47: LUCENE-9330: Make SortFields responsible for index sorting and serialization (#1440)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 1ef077f5661b2092dd718d3022c38084cce8440b
Author: Alan Woodward <ro...@apache.org>
AuthorDate: Fri May 22 13:33:06 2020 +0100

    LUCENE-9330: Make SortFields responsible for index sorting and serialization (#1440)
    
    This commit adds a new class IndexSorter which handles how a sort should be applied
    to documents in an index:
    
    * how to serialize/deserialize sort info in the segment header
    * how to sort documents within a segment
    * how to sort documents from merging segments
    
    SortField has a getIndexSorter() method, which will return null if the sort cannot be used
    to sort an index (eg if it uses scores or other query-dependent values). This also requires a
    new Codec as there is a change to the SegmentInfoFormat
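
    A rough illustration of the contract described above (illustrative only, not part of
    this commit): it assumes just the getIndexSorter() behavior stated here, while the
    serialization half is handled by the new SortFieldProvider SPI in the file list below.

        import org.apache.lucene.index.IndexSorter;
        import org.apache.lucene.search.SortField;

        public class IndexSortCheck {
          public static void main(String[] args) {
            SortField byTimestamp = new SortField("timestamp", SortField.Type.LONG);
            // Per the description above, getIndexSorter() returns null for sorts that
            // cannot order an index (e.g. score-based or other query-dependent sorts).
            IndexSorter sorter = byTimestamp.getIndexSorter();
            System.out.println(sorter != null
                ? byTimestamp + " can be used as an index sort"
                : byTimestamp + " cannot be used as an index sort");
          }
        }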
---
 lucene/CHANGES.txt                                 |   3 +
 .../codecs/lucene70/Lucene70SegmentInfoFormat.java | 168 +-------
 .../lucene/codecs/lucene70/package-info.java       |   2 +-
 .../lucene/codecs/lucene84/Lucene84Codec.java      |  46 +--
 .../lucene/codecs/lucene84}/package-info.java      |   4 +-
 .../services/org.apache.lucene.codecs.Codec        |   1 +
 .../lucene70/Lucene70RWSegmentInfoFormat.java      | 204 ++++++++++
 .../lucene70/TestLucene70SegmentInfoFormat.java    |  13 +-
 .../benchmark/byTask/tasks/CreateIndexTask.java    |   4 +-
 .../simpletext/SimpleTextSegmentInfoFormat.java    | 306 ++------------
 .../src/java/org/apache/lucene/codecs/Codec.java   |   2 +-
 .../lucene/codecs/lucene84/package-info.java       | 396 +-----------------
 .../Lucene86Codec.java}                            |  72 ++--
 .../codecs/lucene86/Lucene86SegmentInfoFormat.java | 217 ++++++++++
 .../{lucene84 => lucene86}/package-info.java       |  60 +--
 .../apache/lucene/index/BinaryDocValuesWriter.java |  29 +-
 .../apache/lucene/index/DefaultIndexingChain.java  | 183 ++++++---
 .../apache/lucene/index/DocValuesLeafReader.java   |  89 ++++
 .../org/apache/lucene/index/DocValuesWriter.java   |   8 +-
 .../java/org/apache/lucene/index/IndexSorter.java  | 448 +++++++++++++++++++++
 .../org/apache/lucene/index/IndexWriterConfig.java |  17 +-
 .../java/org/apache/lucene/index/MultiSorter.java  | 144 +------
 .../lucene/index/NumericDocValuesWriter.java       |  33 +-
 .../org/apache/lucene/index/SortFieldProvider.java | 118 ++++++
 .../apache/lucene/index/SortedDocValuesWriter.java |  50 +--
 .../lucene/index/SortedNumericDocValuesWriter.java |  33 +-
 .../lucene/index/SortedSetDocValuesWriter.java     |  45 +--
 .../src/java/org/apache/lucene/index/Sorter.java   | 238 +----------
 .../java/org/apache/lucene/search/SortField.java   | 135 +++++++
 .../lucene/search/SortedNumericSortField.java      | 106 +++++
 .../apache/lucene/search/SortedSetSortField.java   |  69 +++-
 .../services/org.apache.lucene.codecs.Codec        |   2 +-
 .../org.apache.lucene.index.SortFieldProvider      |  20 +
 ...tLucene50StoredFieldsFormatHighCompression.java |  11 +-
 .../codecs/lucene80/TestLucene80NormsFormat.java   |   4 +-
 .../TestLucene86SegmentInfoFormat.java}            |   6 +-
 .../org/apache/lucene/index/TestIndexSorting.java  |   5 +-
 .../org/apache/lucene/index/TestPointValues.java   |   4 +-
 .../org/apache/lucene/search/TestBoolean2.java     |   8 +-
 .../document/TestFloatPointNearestNeighbor.java    |   2 +-
 .../test/org/apache/lucene/search/TestNearest.java |   2 +-
 .../search/suggest/document/TestSuggestField.java  |  11 +-
 .../apache/lucene/geo/BaseGeoPointTestCase.java    |   2 +-
 .../org/apache/lucene/geo/BaseXYPointTestCase.java |   2 +-
 .../util/TestRuleSetupAndRestoreClassEnv.java      |  29 +-
 .../src/java/org/apache/lucene/util/TestUtil.java  |   4 +-
 .../org/apache/solr/core/SchemaCodecFactory.java   |   4 +-
 47 files changed, 1832 insertions(+), 1527 deletions(-)

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 46c7063..078e8b5 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -149,6 +149,9 @@ API Changes
 * LUCENE-9339: MergeScheduler#merge doesn't accept a parameter if a new merge was found anymore.
   (Simon Willnauer)
 
+* LUCENE-9330: SortFields are now responsible for writing themselves into index headers if they
+  are used as index sorts.  (Alan Woodward, Uwe Schindler, Adrien Grand)
+
 * LUCENE-9340: Deprecate SimpleBindings#add(SortField). (Alan Woodward)
 
 * LUCENE-9345: MergeScheduler is now decoupled from IndexWriter. Instead it accepts a MergeSource
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
similarity index 63%
rename from lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
index ed55770..ab54012 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/Lucene70SegmentInfoFormat.java
@@ -24,9 +24,9 @@ import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.SegmentInfoFormat;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexFileNames;
-import org.apache.lucene.index.IndexWriter; // javadocs
-import org.apache.lucene.index.SegmentInfo; // javadocs
-import org.apache.lucene.index.SegmentInfos; // javadocs
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.SegmentInfo;
+import org.apache.lucene.index.SegmentInfos;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.SortedNumericSelector;
@@ -34,10 +34,9 @@ import org.apache.lucene.search.SortedNumericSortField;
 import org.apache.lucene.search.SortedSetSelector;
 import org.apache.lucene.search.SortedSetSortField;
 import org.apache.lucene.store.ChecksumIndexInput;
-import org.apache.lucene.store.DataOutput; // javadocs
+import org.apache.lucene.store.DataOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.Version;
 
 /**
@@ -271,164 +270,7 @@ public class Lucene70SegmentInfoFormat extends SegmentInfoFormat {
 
   @Override
   public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
-    final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene70SegmentInfoFormat.SI_EXTENSION);
-
-    try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
-      // Only add the file once we've successfully created it, else IFD assert can trip:
-      si.addFile(fileName);
-      CodecUtil.writeIndexHeader(output,
-                                   Lucene70SegmentInfoFormat.CODEC_NAME,
-                                   Lucene70SegmentInfoFormat.VERSION_CURRENT,
-                                   si.getId(),
-                                   "");
-      Version version = si.getVersion();
-      if (version.major < 7) {
-        throw new IllegalArgumentException("invalid major version: should be >= 7 but got: " + version.major + " segment=" + si);
-      }
-      // Write the Lucene version that created this segment, since 3.1
-      output.writeInt(version.major);
-      output.writeInt(version.minor);
-      output.writeInt(version.bugfix);
-
-      // Write the min Lucene version that contributed docs to the segment, since 7.0
-      if (si.getMinVersion() != null) {
-        output.writeByte((byte) 1);
-        Version minVersion = si.getMinVersion();
-        output.writeInt(minVersion.major);
-        output.writeInt(minVersion.minor);
-        output.writeInt(minVersion.bugfix);
-      } else {
-        output.writeByte((byte) 0);
-      }
-
-      assert version.prerelease == 0;
-      output.writeInt(si.maxDoc());
-
-      output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
-      output.writeMapOfStrings(si.getDiagnostics());
-      Set<String> files = si.files();
-      for (String file : files) {
-        if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
-          throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
-        }
-      }
-      output.writeSetOfStrings(files);
-      output.writeMapOfStrings(si.getAttributes());
-
-      Sort indexSort = si.getIndexSort();
-      int numSortFields = indexSort == null ? 0 : indexSort.getSort().length;
-      output.writeVInt(numSortFields);
-      for (int i = 0; i < numSortFields; ++i) {
-        SortField sortField = indexSort.getSort()[i];
-        SortField.Type sortType = sortField.getType();
-        output.writeString(sortField.getField());
-        int sortTypeID;
-        switch (sortField.getType()) {
-          case STRING:
-            sortTypeID = 0;
-            break;
-          case LONG:
-            sortTypeID = 1;
-            break;
-          case INT:
-            sortTypeID = 2;
-            break;
-          case DOUBLE:
-            sortTypeID = 3;
-            break;
-          case FLOAT:
-            sortTypeID = 4;
-            break;
-          case CUSTOM:
-            if (sortField instanceof SortedSetSortField) {
-              sortTypeID = 5;
-              sortType = SortField.Type.STRING;
-            } else if (sortField instanceof SortedNumericSortField) {
-              sortTypeID = 6;
-              sortType = ((SortedNumericSortField) sortField).getNumericType();
-            } else {
-              throw new IllegalStateException("Unexpected SortedNumericSortField " + sortField);
-            }
-            break;
-          default:
-            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
-        }
-        output.writeVInt(sortTypeID);
-        if (sortTypeID == 5) {
-          SortedSetSortField ssf = (SortedSetSortField) sortField;
-          if (ssf.getSelector() == SortedSetSelector.Type.MIN) {
-            output.writeByte((byte) 0);
-          } else if (ssf.getSelector() == SortedSetSelector.Type.MAX) {
-            output.writeByte((byte) 1);
-          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MIN) {
-            output.writeByte((byte) 2);
-          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MAX) {
-            output.writeByte((byte) 3);
-          } else {
-            throw new IllegalStateException("Unexpected SortedSetSelector type: " + ssf.getSelector());
-          }
-        } else if (sortTypeID == 6) {
-          SortedNumericSortField snsf = (SortedNumericSortField) sortField;
-          if (snsf.getNumericType() == SortField.Type.LONG) {
-            output.writeByte((byte) 0);
-          } else if (snsf.getNumericType() == SortField.Type.INT) {
-            output.writeByte((byte) 1);
-          } else if (snsf.getNumericType() == SortField.Type.DOUBLE) {
-            output.writeByte((byte) 2);
-          } else if (snsf.getNumericType() == SortField.Type.FLOAT) {
-            output.writeByte((byte) 3);
-          } else {
-            throw new IllegalStateException("Unexpected SortedNumericSelector type: " + snsf.getNumericType());
-          }
-          if (snsf.getSelector() == SortedNumericSelector.Type.MIN) {
-            output.writeByte((byte) 0);
-          } else if (snsf.getSelector() == SortedNumericSelector.Type.MAX) {
-            output.writeByte((byte) 1);
-          } else {
-            throw new IllegalStateException("Unexpected sorted numeric selector type: " + snsf.getSelector());
-          }
-        }
-        output.writeByte((byte) (sortField.getReverse() ? 0 : 1));
-
-        // write missing value 
-        Object missingValue = sortField.getMissingValue();
-        if (missingValue == null) {
-          output.writeByte((byte) 0);
-        } else {
-          switch(sortType) {
-          case STRING:
-            if (missingValue == SortField.STRING_LAST) {
-              output.writeByte((byte) 1);
-            } else if (missingValue == SortField.STRING_FIRST) {
-              output.writeByte((byte) 2);
-            } else {
-              throw new AssertionError("unrecognized missing value for STRING field \"" + sortField.getField() + "\": " + missingValue);
-            }
-            break;
-          case LONG:
-            output.writeByte((byte) 1);
-            output.writeLong(((Long) missingValue).longValue());
-            break;
-          case INT:
-            output.writeByte((byte) 1);
-            output.writeInt(((Integer) missingValue).intValue());
-            break;
-          case DOUBLE:
-            output.writeByte((byte) 1);
-            output.writeLong(Double.doubleToLongBits(((Double) missingValue).doubleValue()));
-            break;
-          case FLOAT:
-            output.writeByte((byte) 1);
-            output.writeInt(Float.floatToIntBits(((Float) missingValue).floatValue()));
-            break;
-          default:
-            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
-          }
-        }
-      }
-
-      CodecUtil.writeFooter(output);
-    }
+    throw new UnsupportedOperationException("Old formats can't be used for writing");
   }
 
   /** File extension used to store {@link SegmentInfo}. */
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/package-info.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/package-info.java
similarity index 96%
copy from lucene/core/src/java/org/apache/lucene/codecs/lucene70/package-info.java
copy to lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/package-info.java
index e1913a0..6bbf70c 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/package-info.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/package-info.java
@@ -16,7 +16,7 @@
  */
 
 /**
- * Components from the Lucene 7.0 index format.  See {@link org.apache.lucene.codecs.lucene80}
+ * Components from the Lucene 7.0 index format.  See {@link org.apache.lucene.codecs.lucene86}
  * for an overview of the current index format.
  */
 package org.apache.lucene.codecs.lucene70;
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
similarity index 96%
copy from lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
copy to lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
index e3f061a..579c6a0 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
@@ -60,31 +60,31 @@ public class Lucene84Codec extends Codec {
   private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat();
   private final CompoundFormat compoundFormat = new Lucene50CompoundFormat();
   private final PostingsFormat defaultFormat;
-  
+
   private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() {
     @Override
     public PostingsFormat getPostingsFormatForField(String field) {
       return Lucene84Codec.this.getPostingsFormatForField(field);
     }
   };
-  
+
   private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() {
     @Override
     public DocValuesFormat getDocValuesFormatForField(String field) {
       return Lucene84Codec.this.getDocValuesFormatForField(field);
     }
   };
-  
+
   private final StoredFieldsFormat storedFieldsFormat;
 
-  /** 
+  /**
    * Instantiates a new codec.
    */
   public Lucene84Codec() {
     this(Mode.BEST_SPEED);
   }
-  
-  /** 
+
+  /**
    * Instantiates a new codec, specifying the stored fields compression
    * mode to use.
    * @param mode stored fields compression mode to use for newly
@@ -95,12 +95,12 @@ public class Lucene84Codec extends Codec {
     this.storedFieldsFormat = new Lucene50StoredFieldsFormat(Objects.requireNonNull(mode));
     this.defaultFormat = new Lucene84PostingsFormat();
   }
-  
+
   @Override
   public final StoredFieldsFormat storedFieldsFormat() {
     return storedFieldsFormat;
   }
-  
+
   @Override
   public final TermVectorsFormat termVectorsFormat() {
     return vectorsFormat;
@@ -110,17 +110,17 @@ public class Lucene84Codec extends Codec {
   public final PostingsFormat postingsFormat() {
     return postingsFormat;
   }
-  
+
   @Override
   public final FieldInfosFormat fieldInfosFormat() {
     return fieldInfosFormat;
   }
-  
+
   @Override
   public final SegmentInfoFormat segmentInfoFormat() {
     return segmentInfosFormat;
   }
-  
+
   @Override
   public final LiveDocsFormat liveDocsFormat() {
     return liveDocsFormat;
@@ -136,32 +136,32 @@ public class Lucene84Codec extends Codec {
     return new Lucene60PointsFormat();
   }
 
-  /** Returns the postings format that should be used for writing 
+  /** Returns the postings format that should be used for writing
    *  new segments of <code>field</code>.
-   *  
+   *
    *  The default implementation always returns "Lucene84".
    *  <p>
-   *  <b>WARNING:</b> if you subclass, you are responsible for index 
-   *  backwards compatibility: future version of Lucene are only 
-   *  guaranteed to be able to read the default implementation. 
+   *  <b>WARNING:</b> if you subclass, you are responsible for index
+   *  backwards compatibility: future versions of Lucene are only
+   *  guaranteed to be able to read the default implementation.
    */
   public PostingsFormat getPostingsFormatForField(String field) {
     return defaultFormat;
   }
-  
-  /** Returns the docvalues format that should be used for writing 
+
+  /** Returns the docvalues format that should be used for writing
    *  new segments of <code>field</code>.
-   *  
+   *
    *  The default implementation always returns "Lucene80".
    *  <p>
-   *  <b>WARNING:</b> if you subclass, you are responsible for index 
-   *  backwards compatibility: future version of Lucene are only 
-   *  guaranteed to be able to read the default implementation. 
+   *  <b>WARNING:</b> if you subclass, you are responsible for index
+   *  backwards compatibility: future version of Lucene are only
+   *  guaranteed to be able to read the default implementation.
    */
   public DocValuesFormat getDocValuesFormatForField(String field) {
     return defaultDVFormat;
   }
-  
+
   @Override
   public final DocValuesFormat docValuesFormat() {
     return docValuesFormat;
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/package-info.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/package-info.java
similarity index 86%
rename from lucene/core/src/java/org/apache/lucene/codecs/lucene70/package-info.java
rename to lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/package-info.java
index e1913a0..5940a47 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/package-info.java
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene84/package-info.java
@@ -16,7 +16,7 @@
  */
 
 /**
- * Components from the Lucene 7.0 index format.  See {@link org.apache.lucene.codecs.lucene80}
+ * Components from the Lucene 8.4 index format.  See {@link org.apache.lucene.codecs.lucene86}
  * for an overview of the current index format.
  */
-package org.apache.lucene.codecs.lucene70;
+package org.apache.lucene.codecs.lucene84;
diff --git a/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec b/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
index a818e35..cf7a945 100644
--- a/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
+++ b/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
@@ -14,3 +14,4 @@
 #  limitations under the License.
 
 org.apache.lucene.codecs.lucene80.Lucene80Codec
+org.apache.lucene.codecs.lucene84.Lucene84Codec
diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/Lucene70RWSegmentInfoFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/Lucene70RWSegmentInfoFormat.java
new file mode 100644
index 0000000..75f31c2
--- /dev/null
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/Lucene70RWSegmentInfoFormat.java
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.codecs.lucene70;
+
+import java.io.IOException;
+import java.util.Set;
+
+import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.SegmentInfo;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.SortedNumericSelector;
+import org.apache.lucene.search.SortedNumericSortField;
+import org.apache.lucene.search.SortedSetSelector;
+import org.apache.lucene.search.SortedSetSortField;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.Version;
+
+/**
+ * Writable version of Lucene70SegmentInfoFormat for testing
+ */
+public class Lucene70RWSegmentInfoFormat extends Lucene70SegmentInfoFormat {
+
+  @Override
+  public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
+    final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene70SegmentInfoFormat.SI_EXTENSION);
+
+    try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
+      // Only add the file once we've successfully created it, else IFD assert can trip:
+      si.addFile(fileName);
+      CodecUtil.writeIndexHeader(output,
+          Lucene70SegmentInfoFormat.CODEC_NAME,
+          Lucene70SegmentInfoFormat.VERSION_CURRENT,
+          si.getId(),
+          "");
+      Version version = si.getVersion();
+      if (version.major < 7) {
+        throw new IllegalArgumentException("invalid major version: should be >= 7 but got: " + version.major + " segment=" + si);
+      }
+      // Write the Lucene version that created this segment, since 3.1
+      output.writeInt(version.major);
+      output.writeInt(version.minor);
+      output.writeInt(version.bugfix);
+
+      // Write the min Lucene version that contributed docs to the segment, since 7.0
+      if (si.getMinVersion() != null) {
+        output.writeByte((byte) 1);
+        Version minVersion = si.getMinVersion();
+        output.writeInt(minVersion.major);
+        output.writeInt(minVersion.minor);
+        output.writeInt(minVersion.bugfix);
+      } else {
+        output.writeByte((byte) 0);
+      }
+
+      assert version.prerelease == 0;
+      output.writeInt(si.maxDoc());
+
+      output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
+      output.writeMapOfStrings(si.getDiagnostics());
+      Set<String> files = si.files();
+      for (String file : files) {
+        if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
+          throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
+        }
+      }
+      output.writeSetOfStrings(files);
+      output.writeMapOfStrings(si.getAttributes());
+
+      Sort indexSort = si.getIndexSort();
+      int numSortFields = indexSort == null ? 0 : indexSort.getSort().length;
+      output.writeVInt(numSortFields);
+      for (int i = 0; i < numSortFields; ++i) {
+        SortField sortField = indexSort.getSort()[i];
+        SortField.Type sortType = sortField.getType();
+        output.writeString(sortField.getField());
+        int sortTypeID;
+        switch (sortField.getType()) {
+          case STRING:
+            sortTypeID = 0;
+            break;
+          case LONG:
+            sortTypeID = 1;
+            break;
+          case INT:
+            sortTypeID = 2;
+            break;
+          case DOUBLE:
+            sortTypeID = 3;
+            break;
+          case FLOAT:
+            sortTypeID = 4;
+            break;
+          case CUSTOM:
+            if (sortField instanceof SortedSetSortField) {
+              sortTypeID = 5;
+              sortType = SortField.Type.STRING;
+            } else if (sortField instanceof SortedNumericSortField) {
+              sortTypeID = 6;
+              sortType = ((SortedNumericSortField) sortField).getNumericType();
+            } else {
+              throw new IllegalStateException("Unexpected SortedNumericSortField " + sortField);
+            }
+            break;
+          default:
+            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
+        }
+        output.writeVInt(sortTypeID);
+        if (sortTypeID == 5) {
+          SortedSetSortField ssf = (SortedSetSortField) sortField;
+          if (ssf.getSelector() == SortedSetSelector.Type.MIN) {
+            output.writeByte((byte) 0);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MAX) {
+            output.writeByte((byte) 1);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MIN) {
+            output.writeByte((byte) 2);
+          } else if (ssf.getSelector() == SortedSetSelector.Type.MIDDLE_MAX) {
+            output.writeByte((byte) 3);
+          } else {
+            throw new IllegalStateException("Unexpected SortedSetSelector type: " + ssf.getSelector());
+          }
+        } else if (sortTypeID == 6) {
+          SortedNumericSortField snsf = (SortedNumericSortField) sortField;
+          if (snsf.getNumericType() == SortField.Type.LONG) {
+            output.writeByte((byte) 0);
+          } else if (snsf.getNumericType() == SortField.Type.INT) {
+            output.writeByte((byte) 1);
+          } else if (snsf.getNumericType() == SortField.Type.DOUBLE) {
+            output.writeByte((byte) 2);
+          } else if (snsf.getNumericType() == SortField.Type.FLOAT) {
+            output.writeByte((byte) 3);
+          } else {
+            throw new IllegalStateException("Unexpected SortedNumericSelector type: " + snsf.getNumericType());
+          }
+          if (snsf.getSelector() == SortedNumericSelector.Type.MIN) {
+            output.writeByte((byte) 0);
+          } else if (snsf.getSelector() == SortedNumericSelector.Type.MAX) {
+            output.writeByte((byte) 1);
+          } else {
+            throw new IllegalStateException("Unexpected sorted numeric selector type: " + snsf.getSelector());
+          }
+        }
+        output.writeByte((byte) (sortField.getReverse() ? 0 : 1));
+
+        // write missing value
+        Object missingValue = sortField.getMissingValue();
+        if (missingValue == null) {
+          output.writeByte((byte) 0);
+        } else {
+          switch(sortType) {
+            case STRING:
+              if (missingValue == SortField.STRING_LAST) {
+                output.writeByte((byte) 1);
+              } else if (missingValue == SortField.STRING_FIRST) {
+                output.writeByte((byte) 2);
+              } else {
+                throw new AssertionError("unrecognized missing value for STRING field \"" + sortField.getField() + "\": " + missingValue);
+              }
+              break;
+            case LONG:
+              output.writeByte((byte) 1);
+              output.writeLong(((Long) missingValue).longValue());
+              break;
+            case INT:
+              output.writeByte((byte) 1);
+              output.writeInt(((Integer) missingValue).intValue());
+              break;
+            case DOUBLE:
+              output.writeByte((byte) 1);
+              output.writeLong(Double.doubleToLongBits(((Double) missingValue).doubleValue()));
+              break;
+            case FLOAT:
+              output.writeByte((byte) 1);
+              output.writeInt(Float.floatToIntBits(((Float) missingValue).floatValue()));
+              break;
+            default:
+              throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
+          }
+        }
+      }
+
+      CodecUtil.writeFooter(output);
+    }
+  }
+
+}
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
similarity index 77%
copy from lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
copy to lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
index 3bf6a18..ac516a1 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
+++ b/lucene/backward-codecs/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
@@ -14,22 +14,29 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.lucene.codecs.lucene70;
 
 import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.codecs.FilterCodec;
+import org.apache.lucene.codecs.SegmentInfoFormat;
 import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
-import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;
 
 public class TestLucene70SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {
 
   @Override
   protected Version[] getVersions() {
-    return new Version[] { Version.LATEST };
+    return new Version[] { Version.LUCENE_8_4_0 };
   }
 
   @Override
   protected Codec getCodec() {
-    return TestUtil.getDefaultCodec();
+    return new FilterCodec("Lucene84", Codec.forName("Lucene84")) {
+      @Override
+      public SegmentInfoFormat segmentInfoFormat() {
+        return new Lucene70RWSegmentInfoFormat();
+      }
+    };
   }
 }
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
index 5510328..db64781 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
@@ -29,7 +29,7 @@ import org.apache.lucene.benchmark.byTask.PerfRunData;
 import org.apache.lucene.benchmark.byTask.utils.Config;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.apache.lucene.index.ConcurrentMergeScheduler;
 import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.index.IndexDeletionPolicy;
@@ -138,7 +138,7 @@ public class CreateIndexTask extends PerfTask {
     if (defaultCodec == null && postingsFormat != null) {
       try {
         final PostingsFormat postingsFormatChosen = PostingsFormat.forName(postingsFormat);
-        iwConf.setCodec(new Lucene84Codec() {
+        iwConf.setCodec(new Lucene86Codec() {
           @Override
           public PostingsFormat getPostingsFormatForField(String field) {
             return postingsFormatChosen;
diff --git a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java
index 5f22f62..2acfe01 100644
--- a/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java
+++ b/lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextSegmentInfoFormat.java
@@ -29,17 +29,16 @@ import java.util.Set;
 import org.apache.lucene.codecs.SegmentInfoFormat;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.IndexSorter;
 import org.apache.lucene.index.SegmentInfo;
+import org.apache.lucene.index.SortFieldProvider;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.SortedNumericSelector;
-import org.apache.lucene.search.SortedNumericSortField;
-import org.apache.lucene.search.SortedSetSelector;
-import org.apache.lucene.search.SortedSetSortField;
+import org.apache.lucene.store.ByteArrayDataInput;
 import org.apache.lucene.store.ChecksumIndexInput;
+import org.apache.lucene.store.DataOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
@@ -68,11 +67,9 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
   final static BytesRef SI_FILE             = new BytesRef("      file ");
   final static BytesRef SI_ID               = new BytesRef("    id ");
   final static BytesRef SI_SORT             = new BytesRef("    sort ");
-  final static BytesRef SI_SORT_FIELD       = new BytesRef("      field ");
   final static BytesRef SI_SORT_TYPE        = new BytesRef("      type ");
-  final static BytesRef SI_SELECTOR_TYPE    = new BytesRef("      selector ");
-  final static BytesRef SI_SORT_REVERSE     = new BytesRef("      reverse ");
-  final static BytesRef SI_SORT_MISSING     = new BytesRef("      missing ");
+  final static BytesRef SI_SORT_NAME        = new BytesRef("      name ");
+  final static BytesRef SI_SORT_BYTES       = new BytesRef("      bytes ");
 
   public static final String SI_EXTENSION = "si";
   
@@ -171,133 +168,18 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
       SortField[] sortField = new SortField[numSortFields];
       for (int i = 0; i < numSortFields; ++i) {
         SimpleTextUtil.readLine(input, scratch);
-        assert StringHelper.startsWith(scratch.get(), SI_SORT_FIELD);
-        final String field = readString(SI_SORT_FIELD.length, scratch);
+        assert StringHelper.startsWith(scratch.get(), SI_SORT_NAME);
+        final String provider = readString(SI_SORT_NAME.length, scratch);
 
         SimpleTextUtil.readLine(input, scratch);
         assert StringHelper.startsWith(scratch.get(), SI_SORT_TYPE);
-        final String typeAsString = readString(SI_SORT_TYPE.length, scratch);
-
-        final SortField.Type type;
-        SortedSetSelector.Type selectorSet = null;
-        SortedNumericSelector.Type selectorNumeric = null;
-        switch (typeAsString) {
-          case "string":
-            type = SortField.Type.STRING;
-            break;
-          case "long":
-            type = SortField.Type.LONG;
-            break;
-          case "int":
-            type = SortField.Type.INT;
-            break;
-          case "double":
-            type = SortField.Type.DOUBLE;
-            break;
-          case "float":
-            type = SortField.Type.FLOAT;
-            break;
-          case "multi_valued_string":
-            type = SortField.Type.STRING;
-            selectorSet = readSetSelector(input, scratch);
-            break;
-          case "multi_valued_long":
-            type = SortField.Type.LONG;
-            selectorNumeric = readNumericSelector(input, scratch);
-            break;
-          case "multi_valued_int":
-            type = SortField.Type.INT;
-            selectorNumeric = readNumericSelector(input, scratch);
-            break;
-          case "multi_valued_double":
-            type = SortField.Type.DOUBLE;
-            selectorNumeric = readNumericSelector(input, scratch);
-            break;
-          case "multi_valued_float":
-            type = SortField.Type.FLOAT;
-            selectorNumeric = readNumericSelector(input, scratch);
-            break;
-          default:
-            throw new CorruptIndexException("unable to parse sort type string: " + typeAsString, input);
-        }
 
         SimpleTextUtil.readLine(input, scratch);
-        assert StringHelper.startsWith(scratch.get(), SI_SORT_REVERSE);
-        final boolean reverse = Boolean.parseBoolean(readString(SI_SORT_REVERSE.length, scratch));
-
-        SimpleTextUtil.readLine(input, scratch);
-        assert StringHelper.startsWith(scratch.get(), SI_SORT_MISSING);
-        final String missingLastAsString = readString(SI_SORT_MISSING.length, scratch);
-        final Object missingValue;
-        switch (type) {
-          case STRING:
-            switch (missingLastAsString) {
-              case "null":
-                missingValue = null;
-                break;
-              case "first":
-                missingValue = SortField.STRING_FIRST;
-                break;
-              case "last":
-                missingValue = SortField.STRING_LAST;
-                break;
-              default:
-                throw new CorruptIndexException("unable to parse missing string: " + typeAsString, input);
-            }
-            break;
-          case LONG:
-            switch (missingLastAsString) {
-              case "null":
-                missingValue = null;
-                break;
-              default:
-                missingValue = Long.parseLong(missingLastAsString);
-                break;
-            }
-            break;
-          case INT:
-            switch (missingLastAsString) {
-              case "null":
-                missingValue = null;
-                break;
-              default:
-                missingValue = Integer.parseInt(missingLastAsString);
-                break;
-            }
-            break;
-          case DOUBLE:
-            switch (missingLastAsString) {
-              case "null":
-                missingValue = null;
-                break;
-              default:
-                missingValue = Double.parseDouble(missingLastAsString);
-                break;
-            }
-            break;
-          case FLOAT:
-            switch (missingLastAsString) {
-              case "null":
-                missingValue = null;
-                break;
-              default:
-                missingValue = Float.parseFloat(missingLastAsString);
-                break;
-            }
-            break;
-          default:
-            throw new AssertionError();
-        }
-        if (selectorSet != null) {
-          sortField[i] = new SortedSetSortField(field, reverse);
-        } else if (selectorNumeric != null) {
-          sortField[i] = new SortedNumericSortField(field, type, reverse);
-        } else {
-          sortField[i] = new SortField(field, type, reverse);
-        }
-        if (missingValue != null) {
-          sortField[i].setMissingValue(missingValue);
-        }
+        assert StringHelper.startsWith(scratch.get(), SI_SORT_BYTES);
+        BytesRef serializedSort = SimpleTextUtil.fromBytesRefString(readString(SI_SORT_BYTES.length, scratch));
+        final ByteArrayDataInput bytes = new ByteArrayDataInput(serializedSort.bytes, serializedSort.offset, serializedSort.length);
+        sortField[i] = SortFieldProvider.forName(provider).readSortField(bytes);
+        assert bytes.eof();
       }
       Sort indexSort = sortField.length == 0 ? null : new Sort(sortField);
 
@@ -313,38 +195,6 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
   private String readString(int offset, BytesRefBuilder scratch) {
     return new String(scratch.bytes(), offset, scratch.length()-offset, StandardCharsets.UTF_8);
   }
-
-  private SortedSetSelector.Type readSetSelector(IndexInput input, BytesRefBuilder scratch) throws IOException {
-    SimpleTextUtil.readLine(input, scratch);
-    assert StringHelper.startsWith(scratch.get(), SI_SELECTOR_TYPE);
-    final String selectorAsString = readString(SI_SELECTOR_TYPE.length, scratch);
-    switch (selectorAsString) {
-      case "min":
-        return SortedSetSelector.Type.MIN;
-      case "middle_min":
-        return SortedSetSelector.Type.MIDDLE_MIN;
-      case "middle_max":
-        return SortedSetSelector.Type.MIDDLE_MAX;
-      case "max":
-        return SortedSetSelector.Type.MAX;
-      default:
-        throw new CorruptIndexException("unable to parse SortedSetSelector type: " + selectorAsString, input);
-    }
-  }
-
-  private SortedNumericSelector.Type readNumericSelector(IndexInput input, BytesRefBuilder scratch) throws IOException {
-    SimpleTextUtil.readLine(input, scratch);
-    assert StringHelper.startsWith(scratch.get(), SI_SELECTOR_TYPE);
-    final String selectorAsString = readString(SI_SELECTOR_TYPE.length, scratch);
-    switch (selectorAsString) {
-      case "min":
-        return SortedNumericSelector.Type.MIN;
-      case "max":
-        return SortedNumericSelector.Type.MAX;
-      default:
-        throw new CorruptIndexException("unable to parse SortedNumericSelector type: " + selectorAsString, input);
-    }
-  }
   
   @Override
   public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
@@ -434,120 +284,42 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
       SimpleTextUtil.writeNewline(output);
       for (int i = 0; i < numSortFields; ++i) {
         final SortField sortField = indexSort.getSort()[i];
+        IndexSorter sorter = sortField.getIndexSorter();
+        if (sorter == null) {
+          throw new IllegalStateException("Cannot serialize sort " + sortField);
+        }
 
-        SimpleTextUtil.write(output, SI_SORT_FIELD);
-        SimpleTextUtil.write(output, sortField.getField(), scratch);
+        SimpleTextUtil.write(output, SI_SORT_NAME);
+        SimpleTextUtil.write(output, sorter.getProviderName(), scratch);
         SimpleTextUtil.writeNewline(output);
 
         SimpleTextUtil.write(output, SI_SORT_TYPE);
-        final String sortTypeString;
-        final SortField.Type sortType;
-        final boolean multiValued;
-        if (sortField instanceof SortedSetSortField) {
-          sortType = SortField.Type.STRING;
-          multiValued = true;
-        } else if (sortField instanceof SortedNumericSortField) {
-          sortType = ((SortedNumericSortField) sortField).getNumericType();
-          multiValued = true;
-        } else {
-          sortType = sortField.getType();
-          multiValued = false;
-        }
-        switch (sortType) {
-          case STRING:
-              if (multiValued) {
-                sortTypeString = "multi_valued_string";
-              } else {
-                sortTypeString = "string";
-              }
-            break;
-          case LONG:
-            if (multiValued) {
-              sortTypeString = "multi_valued_long";
-            } else {
-              sortTypeString = "long";
-            }
-            break;
-          case INT:
-            if (multiValued) {
-              sortTypeString = "multi_valued_int";
-            } else {
-              sortTypeString = "int";
-            }
-            break;
-          case DOUBLE:
-            if (multiValued) {
-              sortTypeString = "multi_valued_double";
-            } else {
-              sortTypeString = "double";
-            }
-            break;
-          case FLOAT:
-            if (multiValued) {
-              sortTypeString = "multi_valued_float";
-            } else {
-              sortTypeString = "float";
-            }
-            break;
-          default:
-            throw new IllegalStateException("Unexpected sort type: " + sortField.getType());
-        }
-        SimpleTextUtil.write(output, sortTypeString, scratch);
-        SimpleTextUtil.writeNewline(output);
-
-        if (sortField instanceof SortedSetSortField) {
-          SortedSetSelector.Type selector = ((SortedSetSortField) sortField).getSelector();
-          final String selectorString;
-          if (selector == SortedSetSelector.Type.MIN) {
-            selectorString = "min";
-          } else if (selector == SortedSetSelector.Type.MIDDLE_MIN) {
-            selectorString = "middle_min";
-          } else if (selector == SortedSetSelector.Type.MIDDLE_MAX) {
-            selectorString = "middle_max";
-          } else if (selector == SortedSetSelector.Type.MAX) {
-            selectorString = "max";
-          } else {
-            throw new IllegalStateException("Unexpected SortedSetSelector type selector: " + selector);
-          }
-          SimpleTextUtil.write(output, SI_SELECTOR_TYPE);
-          SimpleTextUtil.write(output, selectorString, scratch);
-          SimpleTextUtil.writeNewline(output);
-        } else if (sortField instanceof SortedNumericSortField) {
-          SortedNumericSelector.Type selector = ((SortedNumericSortField) sortField).getSelector();
-          final String selectorString;
-          if (selector == SortedNumericSelector.Type.MIN) {
-            selectorString = "min";
-          } else if (selector == SortedNumericSelector.Type.MAX) {
-            selectorString = "max";
-          } else {
-            throw new IllegalStateException("Unexpected SortedNumericSelector type selector: " + selector);
-          }
-          SimpleTextUtil.write(output, SI_SELECTOR_TYPE);
-          SimpleTextUtil.write(output, selectorString, scratch);
-          SimpleTextUtil.writeNewline(output);
-        }
-
-        SimpleTextUtil.write(output, SI_SORT_REVERSE);
-        SimpleTextUtil.write(output, Boolean.toString(sortField.getReverse()), scratch);
+        SimpleTextUtil.write(output, sortField.toString(), scratch);
         SimpleTextUtil.writeNewline(output);
 
-        SimpleTextUtil.write(output, SI_SORT_MISSING);
-        final Object missingValue = sortField.getMissingValue();
-        final String missing;
-        if (missingValue == null) {
-          missing = "null";
-        } else if (missingValue == SortField.STRING_FIRST) {
-          missing = "first";
-        } else if (missingValue == SortField.STRING_LAST) {
-          missing = "last";
-        } else {
-          missing = missingValue.toString();
-        }
-        SimpleTextUtil.write(output, missing, scratch);
+        SimpleTextUtil.write(output, SI_SORT_BYTES);
+        BytesRefOutput b = new BytesRefOutput();
+        SortFieldProvider.write(sortField, b);
+        SimpleTextUtil.write(output, b.bytes.get().toString(), scratch);
         SimpleTextUtil.writeNewline(output);
       }
       
       SimpleTextUtil.writeChecksum(output, scratch);
     }
   }
+
+  static class BytesRefOutput extends DataOutput {
+
+    final BytesRefBuilder bytes = new BytesRefBuilder();
+
+    @Override
+    public void writeByte(byte b) {
+      bytes.append(b);
+    }
+
+    @Override
+    public void writeBytes(byte[] b, int offset, int length) {
+      bytes.append(b, offset, length);
+    }
+  }
 }
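
The net effect in SimpleText is that per-type sort serialization is delegated to SortFieldProvider: the writer records the sorter's provider name (SI_SORT_NAME) plus an opaque byte stream (SI_SORT_BYTES), and the reader resolves the provider by name to decode it. A minimal round-trip sketch of that mechanism, using a tiny in-memory DataOutput shaped like the BytesRefOutput helper above; the roundTrip helper and the SortField instance are illustrative, and getIndexSorter() returns null for sorts that cannot serve as index sorts (which the writer above rejects):

    import org.apache.lucene.index.SortFieldProvider;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.store.ByteArrayDataInput;
    import org.apache.lucene.store.DataOutput;
    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util.BytesRefBuilder;

    SortField roundTrip(SortField original) throws java.io.IOException {
      BytesRefBuilder buffer = new BytesRefBuilder();
      DataOutput out = new DataOutput() {           // same shape as BytesRefOutput above
        @Override public void writeByte(byte b) { buffer.append(b); }
        @Override public void writeBytes(byte[] b, int offset, int length) { buffer.append(b, offset, length); }
      };
      String providerName = original.getIndexSorter().getProviderName();
      SortFieldProvider.write(original, out);       // provider-specific byte stream
      BytesRef bytes = buffer.get();
      ByteArrayDataInput in = new ByteArrayDataInput(bytes.bytes, bytes.offset, bytes.length);
      return SortFieldProvider.forName(providerName).readSortField(in);
    }
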
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/Codec.java b/lucene/core/src/java/org/apache/lucene/codecs/Codec.java
index 07797c6..8b5ca14 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/Codec.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/Codec.java
@@ -57,7 +57,7 @@ public abstract class Codec implements NamedSPILoader.NamedSPI {
     }
     
     // TODO: should we use this, or maybe a system property is better?
-    static Codec defaultCodec = LOADER.lookup("Lucene84");
+    static Codec defaultCodec = LOADER.lookup("Lucene86");
   }
 
   private final String name;
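
With the default lookup now resolving to "Lucene86", callers that pin a codec by name should use the same SPI name. A brief sketch; the analyzer-less IndexWriterConfig constructor is used only for brevity:

    import org.apache.lucene.codecs.Codec;
    import org.apache.lucene.index.IndexWriterConfig;

    IndexWriterConfig iwc = new IndexWriterConfig();    // picks up the default ("Lucene86") codec
    iwc.setCodec(Codec.forName("Lucene86"));            // or pin it explicitly by SPI name
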
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/package-info.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene84/package-info.java
index 91ee2e2..5940a47 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/package-info.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene84/package-info.java
@@ -16,399 +16,7 @@
  */
 
 /**
- * Lucene 8.4 file format.
- * 
- * <h2>Apache Lucene - Index File Formats</h2>
- * <div>
- * <ul>
- * <li><a href="#Introduction">Introduction</a></li>
- * <li><a href="#Definitions">Definitions</a>
- *   <ul>
- *   <li><a href="#Inverted_Indexing">Inverted Indexing</a></li>
- *   <li><a href="#Types_of_Fields">Types of Fields</a></li>
- *   <li><a href="#Segments">Segments</a></li>
- *   <li><a href="#Document_Numbers">Document Numbers</a></li>
- *   </ul>
- * </li>
- * <li><a href="#Overview">Index Structure Overview</a></li>
- * <li><a href="#File_Naming">File Naming</a></li>
- * <li><a href="#file-names">Summary of File Extensions</a>
- *   <ul>
- *   <li><a href="#Lock_File">Lock File</a></li>
- *   <li><a href="#History">History</a></li>
- *   <li><a href="#Limitations">Limitations</a></li>
- *   </ul>
- * </li>
- * </ul>
- * </div>
- * <a id="Introduction"></a>
- * <h3>Introduction</h3>
- * <div>
- * <p>This document defines the index file formats used in this version of Lucene.
- * If you are using a different version of Lucene, please consult the copy of
- * <code>docs/</code> that was distributed with
- * the version you are using.</p>
- * <p>This document attempts to provide a high-level definition of the Apache
- * Lucene file formats.</p>
- * </div>
- * <a id="Definitions"></a>
- * <h3>Definitions</h3>
- * <div>
- * <p>The fundamental concepts in Lucene are index, document, field and term.</p>
- * <p>An index contains a sequence of documents.</p>
- * <ul>
- * <li>A document is a sequence of fields.</li>
- * <li>A field is a named sequence of terms.</li>
- * <li>A term is a sequence of bytes.</li>
- * </ul>
- * <p>The same sequence of bytes in two different fields is considered a different 
- * term. Thus terms are represented as a pair: the string naming the field, and the
- * bytes within the field.</p>
- * <a id="Inverted_Indexing"></a>
- * <h4>Inverted Indexing</h4>
- * <p>The index stores statistics about terms in order to make term-based search
- * more efficient. Lucene's index falls into the family of indexes known as an
- * <i>inverted index.</i> This is because it can list, for a term, the documents
- * that contain it. This is the inverse of the natural relationship, in which
- * documents list terms.</p>
- * <a id="Types_of_Fields"></a>
- * <h4>Types of Fields</h4>
- * <p>In Lucene, fields may be <i>stored</i>, in which case their text is stored
- * in the index literally, in a non-inverted manner. Fields that are inverted are
- * called <i>indexed</i>. A field may be both stored and indexed.</p>
- * <p>The text of a field may be <i>tokenized</i> into terms to be indexed, or the
- * text of a field may be used literally as a term to be indexed. Most fields are
- * tokenized, but sometimes it is useful for certain identifier fields to be
- * indexed literally.</p>
- * <p>See the {@link org.apache.lucene.document.Field Field}
- * java docs for more information on Fields.</p>
- * <a id="Segments"></a>
- * <h4>Segments</h4>
- * <p>Lucene indexes may be composed of multiple sub-indexes, or <i>segments</i>.
- * Each segment is a fully independent index, which could be searched separately.
- * Indexes evolve by:</p>
- * <ol>
- * <li>Creating new segments for newly added documents.</li>
- * <li>Merging existing segments.</li>
- * </ol>
- * <p>Searches may involve multiple segments and/or multiple indexes, each index
- * potentially composed of a set of segments.</p>
- * <a id="Document_Numbers"></a>
- * <h4>Document Numbers</h4>
- * <p>Internally, Lucene refers to documents by an integer <i>document number</i>.
- * The first document added to an index is numbered zero, and each subsequent
- * document added gets a number one greater than the previous.</p>
- * <p>Note that a document's number may change, so caution should be taken when
- * storing these numbers outside of Lucene. In particular, numbers may change in
- * the following situations:</p>
- * <ul>
- * <li>
- * <p>The numbers stored in each segment are unique only within the segment, and
- * must be converted before they can be used in a larger context. The standard
- * technique is to allocate each segment a range of values, based on the range of
- * numbers used in that segment. To convert a document number from a segment to an
- * external value, the segment's <i>base</i> document number is added. To convert
- * an external value back to a segment-specific value, the segment is identified
- * by the range that the external value is in, and the segment's base value is
- * subtracted. For example two five document segments might be combined, so that
- * the first segment has a base value of zero, and the second of five. Document
- * three from the second segment would have an external value of eight.</p>
- * </li>
- * <li>
- * <p>When documents are deleted, gaps are created in the numbering. These are
- * eventually removed as the index evolves through merging. Deleted documents are
- * dropped when segments are merged. A freshly-merged segment thus has no gaps in
- * its numbering.</p>
- * </li>
- * </ul>
- * </div>
- * <a id="Overview"></a>
- * <h3>Index Structure Overview</h3>
- * <div>
- * <p>Each segment index maintains the following:</p>
- * <ul>
- * <li>
- * {@link org.apache.lucene.codecs.lucene70.Lucene70SegmentInfoFormat Segment info}.
- *    This contains metadata about a segment, such as the number of documents,
- *    what files it uses, 
- * </li>
- * <li>
- * {@link org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat Field names}. 
- *    This contains the set of field names used in the index.
- * </li>
- * <li>
- * {@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Stored Field values}. 
- * This contains, for each document, a list of attribute-value pairs, where the attributes 
- * are field names. These are used to store auxiliary information about the document, such as 
- * its title, url, or an identifier to access a database. The set of stored fields are what is 
- * returned for each hit when searching. This is keyed by document number.
- * </li>
- * <li>
- * {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term dictionary}. 
- * A dictionary containing all of the terms used in all of the
- * indexed fields of all of the documents. The dictionary also contains the number
- * of documents which contain the term, and pointers to the term's frequency and
- * proximity data.
- * </li>
- * <li>
- * {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Frequency data}. 
- * For each term in the dictionary, the numbers of all the
- * documents that contain that term, and the frequency of the term in that
- * document, unless frequencies are omitted ({@link org.apache.lucene.index.IndexOptions#DOCS IndexOptions.DOCS})
- * </li>
- * <li>
- * {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Proximity data}. 
- * For each term in the dictionary, the positions that the
- * term occurs in each document. Note that this will not exist if all fields in
- * all documents omit position data.
- * </li>
- * <li>
- * {@link org.apache.lucene.codecs.lucene80.Lucene80NormsFormat Normalization factors}.
- * For each field in each document, a value is stored
- * that is multiplied into the score for hits on that field.
- * </li>
- * <li>
- * {@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vectors}. 
- * For each field in each document, the term vector (sometimes
- * called document vector) may be stored. A term vector consists of term text and
- * term frequency. To add Term Vectors to your index see the 
- * {@link org.apache.lucene.document.Field Field} constructors
- * </li>
- * <li>
- * {@link org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat Per-document values}.
- * Like stored values, these are also keyed by document
- * number, but are generally intended to be loaded into main memory for fast
- * access. Whereas stored values are generally intended for summary results from
- * searches, per-document values are useful for things like scoring factors.
- * </li>
- * <li>
- * {@link org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat Live documents}. 
- * An optional file indicating which documents are live.
- * </li>
- * <li>
- * {@link org.apache.lucene.codecs.lucene60.Lucene60PointsFormat Point values}.
- * Optional pair of files, recording dimensionally indexed fields, to enable fast
- * numeric range filtering and large numeric values like BigInteger and BigDecimal (1D)
- * and geographic shape intersection (2D, 3D).
- * </li>
- * </ul>
- * <p>Details on each of these are provided in their linked pages.</p>
- * </div>
- * <a id="File_Naming"></a>
- * <h3>File Naming</h3>
- * <div>
- * <p>All files belonging to a segment have the same name with varying extensions.
- * The extensions correspond to the different file formats described below. When
- * using the Compound File format (default for small segments) these files (except
- * for the Segment info file, the Lock file, and Deleted documents file) are collapsed 
- * into a single .cfs file (see below for details)</p>
- * <p>Typically, all segments in an index are stored in a single directory,
- * although this is not required.</p>
- * <p>File names are never re-used. That is, when any file is saved
- * to the Directory it is given a never before used filename. This is achieved
- * using a simple generations approach. For example, the first segments file is
- * segments_1, then segments_2, etc. The generation is a sequential long integer
- * represented in alpha-numeric (base 36) form.</p>
- * </div>
- * <a id="file-names"></a>
- * <h3>Summary of File Extensions</h3>
- * <div>
- * <p>The following table summarizes the names and extensions of the files in
- * Lucene:</p>
- * <table class="padding4" style="border-spacing: 1px; border-collapse: separate">
- * <caption>lucene filenames by extension</caption>
- * <tr>
- * <th>Name</th>
- * <th>Extension</th>
- * <th>Brief Description</th>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.index.SegmentInfos Segments File}</td>
- * <td>segments_N</td>
- * <td>Stores information about a commit point</td>
- * </tr>
- * <tr>
- * <td><a href="#Lock_File">Lock File</a></td>
- * <td>write.lock</td>
- * <td>The Write lock prevents multiple IndexWriters from writing to the same
- * file.</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene70.Lucene70SegmentInfoFormat Segment Info}</td>
- * <td>.si</td>
- * <td>Stores metadata about a segment</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat Compound File}</td>
- * <td>.cfs, .cfe</td>
- * <td>An optional "virtual" file consisting of all the other index files for
- * systems that frequently run out of file handles.</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat Fields}</td>
- * <td>.fnm</td>
- * <td>Stores information about the fields</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Field Index}</td>
- * <td>.fdx</td>
- * <td>Contains pointers to field data</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Field Data}</td>
- * <td>.fdt</td>
- * <td>The stored fields for documents</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Dictionary}</td>
- * <td>.tim</td>
- * <td>The term dictionary, stores term info</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Index}</td>
- * <td>.tip</td>
- * <td>The index into the Term Dictionary</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Frequencies}</td>
- * <td>.doc</td>
- * <td>Contains the list of docs which contain each term along with frequency</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Positions}</td>
- * <td>.pos</td>
- * <td>Stores position information about where a term occurs in the index</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Payloads}</td>
- * <td>.pay</td>
- * <td>Stores additional per-position metadata information such as character offsets and user payloads</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene80.Lucene80NormsFormat Norms}</td>
- * <td>.nvd, .nvm</td>
- * <td>Encodes length and boost factors for docs and fields</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat Per-Document Values}</td>
- * <td>.dvd, .dvm</td>
- * <td>Encodes additional scoring factors or other per-document information.</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vector Index}</td>
- * <td>.tvx</td>
- * <td>Stores offset into the document data file</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vector Data}</td>
- * <td>.tvd</td>
- * <td>Contains term vector data.</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat Live Documents}</td>
- * <td>.liv</td>
- * <td>Info about what documents are live</td>
- * </tr>
- * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene60.Lucene60PointsFormat Point values}</td>
- * <td>.dii, .dim</td>
- * <td>Holds indexed points, if any</td>
- * </tr>
- * </table>
- * </div>
- * <a id="Lock_File"></a>
- * <h3>Lock File</h3>
- * The write lock, which is stored in the index directory by default, is named
- * "write.lock". If the lock directory is different from the index directory then
- * the write lock will be named "XXXX-write.lock" where XXXX is a unique prefix
- * derived from the full path to the index directory. When this file is present, a
- * writer is currently modifying the index (adding or removing documents). This
- * lock file ensures that only one writer is modifying the index at a time.
- * <a id="History"></a>
- * <h3>History</h3>
- * <p>Compatibility notes are provided in this document, describing how file
- * formats have changed from prior versions:</p>
- * <ul>
- * <li>In version 2.1, the file format was changed to allow lock-less commits (ie,
- * no more commit lock). The change is fully backwards compatible: you can open a
- * pre-2.1 index for searching or adding/deleting of docs. When the new segments
- * file is saved (committed), it will be written in the new file format (meaning
- * no specific "upgrade" process is needed). But note that once a commit has
- * occurred, pre-2.1 Lucene will not be able to read the index.</li>
- * <li>In version 2.3, the file format was changed to allow segments to share a
- * single set of doc store (vectors &amp; stored fields) files. This allows for
- * faster indexing in certain cases. The change is fully backwards compatible (in
- * the same way as the lock-less commits change in 2.1).</li>
- * <li>In version 2.4, Strings are now written as true UTF-8 byte sequence, not
- * Java's modified UTF-8. See <a href="http://issues.apache.org/jira/browse/LUCENE-510">
- * LUCENE-510</a> for details.</li>
- * <li>In version 2.9, an optional opaque Map&lt;String,String&gt; CommitUserData
- * may be passed to IndexWriter's commit methods (and later retrieved), which is
- * recorded in the segments_N file. See <a href="http://issues.apache.org/jira/browse/LUCENE-1382">
- * LUCENE-1382</a> for details. Also,
- * diagnostics were added to each segment written recording details about why it
- * was written (due to flush, merge; which OS/JRE was used; etc.). See issue
- * <a href="http://issues.apache.org/jira/browse/LUCENE-1654">LUCENE-1654</a> for details.</li>
- * <li>In version 3.0, compressed fields are no longer written to the index (they
- * can still be read, but on merge the new segment will write them, uncompressed).
- * See issue <a href="http://issues.apache.org/jira/browse/LUCENE-1960">LUCENE-1960</a> 
- * for details.</li>
- * <li>In version 3.1, segments records the code version that created them. See
- * <a href="http://issues.apache.org/jira/browse/LUCENE-2720">LUCENE-2720</a> for details. 
- * Additionally segments track explicitly whether or not they have term vectors. 
- * See <a href="http://issues.apache.org/jira/browse/LUCENE-2811">LUCENE-2811</a> 
- * for details.</li>
- * <li>In version 3.2, numeric fields are written as natively to stored fields
- * file, previously they were stored in text format only.</li>
- * <li>In version 3.4, fields can omit position data while still indexing term
- * frequencies.</li>
- * <li>In version 4.0, the format of the inverted index became extensible via
- * the {@link org.apache.lucene.codecs.Codec Codec} api. Fast per-document storage
- * ({@code DocValues}) was introduced. Normalization factors need no longer be a 
- * single byte, they can be any {@link org.apache.lucene.index.NumericDocValues NumericDocValues}.
- * Terms need not be unicode strings, they can be any byte sequence. Term offsets 
- * can optionally be indexed into the postings lists. Payloads can be stored in the 
- * term vectors.</li>
- * <li>In version 4.1, the format of the postings list changed to use either
- * of FOR compression or variable-byte encoding, depending upon the frequency
- * of the term. Terms appearing only once were changed to inline directly into
- * the term dictionary. Stored fields are compressed by default. </li>
- * <li>In version 4.2, term vectors are compressed by default. DocValues has 
- * a new multi-valued type (SortedSet), that can be used for faceting/grouping/joining
- * on multi-valued fields.</li>
- * <li>In version 4.5, DocValues were extended to explicitly represent missing values.</li>
- * <li>In version 4.6, FieldInfos were extended to support per-field DocValues generation, to 
- * allow updating NumericDocValues fields.</li>
- * <li>In version 4.8, checksum footers were added to the end of each index file 
- * for improved data integrity. Specifically, the last 8 bytes of every index file
- * contain the zlib-crc32 checksum of the file.</li>
- * <li>In version 4.9, DocValues has a new multi-valued numeric type (SortedNumeric)
- * that is suitable for faceting/sorting/analytics.
- * <li>In version 5.4, DocValues have been improved to store more information on disk:
- * addresses for binary fields and ord indexes for multi-valued fields.
- * <li>In version 6.0, Points were added, for multi-dimensional range/distance search.
- * <li>In version 6.2, new Segment info format that reads/writes the index sort, to support index sorting.
- * <li>In version 7.0, DocValues have been improved to better support sparse doc values
- * thanks to an iterator API.</li>
- * <li>In version 8.0, postings have been enhanced to record, for each block of
- * doc ids, the (term freq, normalization factor) pairs that may trigger the
- * maximum score of the block. This information is recorded alongside skip data
- * in order to be able to skip blocks of doc ids if they may not produce high
- * enough scores.
- * Additionally doc values and norms has been extended with jump-tables to make access O(1)
- * instead of O(n), where n is the number of elements to skip when advancing in the data.</li>
- * <li>In version 8.4, postings, positions, offsets and payload lengths have move to a more
- * performant encoding that is vectorized.</li>
- * </ul>
- * <a id="Limitations"></a>
- * <h3>Limitations</h3>
- * <div>
- * <p>Lucene uses a Java <code>int</code> to refer to
- * document numbers, and the index file format uses an <code>Int32</code>
- * on-disk to store document numbers. This is a limitation
- * of both the index file format and the current implementation. Eventually these
- * should be replaced with either <code>UInt64</code> values, or
- * better yet, {@link org.apache.lucene.store.DataOutput#writeVInt VInt} values which have no limit.</p>
- * </div>
+ * Components from the Lucene 8.4 index format.  See {@link org.apache.lucene.codecs.lucene86}
+ * for an overview of the current index format.
  */
 package org.apache.lucene.codecs.lucene84;
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86Codec.java
similarity index 86%
rename from lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
rename to lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86Codec.java
index e3f061a..b9116e4 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/Lucene84Codec.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86Codec.java
@@ -14,7 +14,8 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.lucene.codecs.lucene84;
+
+package org.apache.lucene.codecs.lucene86;
 
 import java.util.Objects;
 
@@ -33,74 +34,73 @@ import org.apache.lucene.codecs.TermVectorsFormat;
 import org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat;
 import org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
-import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
 import org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat;
 import org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat;
 import org.apache.lucene.codecs.lucene60.Lucene60PointsFormat;
-import org.apache.lucene.codecs.lucene70.Lucene70SegmentInfoFormat;
 import org.apache.lucene.codecs.lucene80.Lucene80NormsFormat;
+import org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat;
 import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat;
 import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
 
 /**
- * Implements the Lucene 8.4 index format, with configurable per-field postings
+ * Implements the Lucene 8.6 index format, with configurable per-field postings
  * and docvalues formats.
  * <p>
  * If you want to reuse functionality of this codec in another codec, extend
  * {@link FilterCodec}.
  *
- * @see org.apache.lucene.codecs.lucene84 package documentation for file format details.
+ * @see org.apache.lucene.codecs.lucene86 package documentation for file format details.
  *
  * @lucene.experimental
  */
-public class Lucene84Codec extends Codec {
+public class Lucene86Codec extends Codec {
   private final TermVectorsFormat vectorsFormat = new Lucene50TermVectorsFormat();
   private final FieldInfosFormat fieldInfosFormat = new Lucene60FieldInfosFormat();
-  private final SegmentInfoFormat segmentInfosFormat = new Lucene70SegmentInfoFormat();
+  private final SegmentInfoFormat segmentInfosFormat = new Lucene86SegmentInfoFormat();
   private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat();
   private final CompoundFormat compoundFormat = new Lucene50CompoundFormat();
   private final PostingsFormat defaultFormat;
-  
+
   private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() {
     @Override
     public PostingsFormat getPostingsFormatForField(String field) {
-      return Lucene84Codec.this.getPostingsFormatForField(field);
+      return Lucene86Codec.this.getPostingsFormatForField(field);
     }
   };
-  
+
   private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() {
     @Override
     public DocValuesFormat getDocValuesFormatForField(String field) {
-      return Lucene84Codec.this.getDocValuesFormatForField(field);
+      return Lucene86Codec.this.getDocValuesFormatForField(field);
     }
   };
-  
+
   private final StoredFieldsFormat storedFieldsFormat;
 
-  /** 
+  /**
    * Instantiates a new codec.
    */
-  public Lucene84Codec() {
-    this(Mode.BEST_SPEED);
+  public Lucene86Codec() {
+    this(Lucene50StoredFieldsFormat.Mode.BEST_SPEED);
   }
-  
-  /** 
+
+  /**
    * Instantiates a new codec, specifying the stored fields compression
    * mode to use.
    * @param mode stored fields compression mode to use for newly
    *             flushed/merged segments.
    */
-  public Lucene84Codec(Mode mode) {
-    super("Lucene84");
+  public Lucene86Codec(Lucene50StoredFieldsFormat.Mode mode) {
+    super("Lucene86");
     this.storedFieldsFormat = new Lucene50StoredFieldsFormat(Objects.requireNonNull(mode));
     this.defaultFormat = new Lucene84PostingsFormat();
   }
-  
+
   @Override
   public final StoredFieldsFormat storedFieldsFormat() {
     return storedFieldsFormat;
   }
-  
+
   @Override
   public final TermVectorsFormat termVectorsFormat() {
     return vectorsFormat;
@@ -110,17 +110,17 @@ public class Lucene84Codec extends Codec {
   public final PostingsFormat postingsFormat() {
     return postingsFormat;
   }
-  
+
   @Override
   public final FieldInfosFormat fieldInfosFormat() {
     return fieldInfosFormat;
   }
-  
+
   @Override
   public final SegmentInfoFormat segmentInfoFormat() {
     return segmentInfosFormat;
   }
-  
+
   @Override
   public final LiveDocsFormat liveDocsFormat() {
     return liveDocsFormat;
@@ -136,32 +136,32 @@ public class Lucene84Codec extends Codec {
     return new Lucene60PointsFormat();
   }
 
-  /** Returns the postings format that should be used for writing 
+  /** Returns the postings format that should be used for writing
    *  new segments of <code>field</code>.
-   *  
+   *
    *  The default implementation always returns "Lucene84".
    *  <p>
-   *  <b>WARNING:</b> if you subclass, you are responsible for index 
-   *  backwards compatibility: future version of Lucene are only 
-   *  guaranteed to be able to read the default implementation. 
+   *  <b>WARNING:</b> if you subclass, you are responsible for index
+   *  backwards compatibility: future versions of Lucene are only
+   *  guaranteed to be able to read the default implementation.
    */
   public PostingsFormat getPostingsFormatForField(String field) {
     return defaultFormat;
   }
-  
-  /** Returns the docvalues format that should be used for writing 
+
+  /** Returns the docvalues format that should be used for writing
    *  new segments of <code>field</code>.
-   *  
+   *
    *  The default implementation always returns "Lucene80".
    *  <p>
-   *  <b>WARNING:</b> if you subclass, you are responsible for index 
-   *  backwards compatibility: future version of Lucene are only 
-   *  guaranteed to be able to read the default implementation. 
+   *  <b>WARNING:</b> if you subclass, you are responsible for index
+   *  backwards compatibility: future version of Lucene are only
+   *  guaranteed to be able to read the default implementation.
    */
   public DocValuesFormat getDocValuesFormatForField(String field) {
     return defaultDVFormat;
   }
-  
+
   @Override
   public final DocValuesFormat docValuesFormat() {
     return docValuesFormat;
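
As the WARNING above notes, overriding the per-field hooks trades away the guarantee that future Lucene versions can read the index, but it is the supported way to mix formats. A hedged usage sketch; the "id" field name is illustrative, and the "Direct" postings format assumes the lucene-codecs module is on the classpath:

    import org.apache.lucene.codecs.Codec;
    import org.apache.lucene.codecs.PostingsFormat;
    import org.apache.lucene.codecs.lucene86.Lucene86Codec;

    Codec codec = new Lucene86Codec() {
      @Override
      public PostingsFormat getPostingsFormatForField(String field) {
        // hypothetical: give the unique-key field its own postings format,
        // everything else keeps the "Lucene84" default
        return "id".equals(field) ? PostingsFormat.forName("Direct") : super.getPostingsFormatForField(field);
      }
    };
    // then: indexWriterConfig.setCodec(codec), as CreateIndexTask does above
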
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86SegmentInfoFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86SegmentInfoFormat.java
new file mode 100644
index 0000000..b2bcdc2
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/Lucene86SegmentInfoFormat.java
@@ -0,0 +1,217 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.codecs.lucene86;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lucene.codecs.CodecUtil;
+import org.apache.lucene.codecs.SegmentInfoFormat;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.IndexFileNames;
+import org.apache.lucene.index.IndexSorter;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.SegmentInfo;
+import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.index.SortFieldProvider;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.store.ChecksumIndexInput;
+import org.apache.lucene.store.DataInput;
+import org.apache.lucene.store.DataOutput;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.util.Version;
+
+/**
+ * Lucene 8.6 Segment info format.
+ * <p>
+ * Files:
+ * <ul>
+ *   <li><code>.si</code>: Header, SegVersion, SegMinVersion, SegSize, IsCompoundFile, Diagnostics, Files, Attributes, IndexSort, Footer
+ * </ul>
+ * Data types:
+ * <ul>
+ *   <li>Header --&gt; {@link CodecUtil#writeIndexHeader IndexHeader}</li>
+ *   <li>SegSize --&gt; {@link DataOutput#writeInt Int32}</li>
+ *   <li>SegVersion --&gt; {@link DataOutput#writeString String}</li>
+ *   <li>SegMinVersion --&gt; {@link DataOutput#writeString String}</li>
+ *   <li>Files --&gt; {@link DataOutput#writeSetOfStrings Set&lt;String&gt;}</li>
+ *   <li>Diagnostics,Attributes --&gt; {@link DataOutput#writeMapOfStrings Map&lt;String,String&gt;}</li>
+ *   <li>IsCompoundFile --&gt; {@link DataOutput#writeByte Int8}</li>
+ *   <li>IndexSort --&gt; {@link DataOutput#writeVInt VInt} count, followed by {@code count} SortField</li>
+ *   <li>SortField --&gt; {@link DataOutput#writeString String} sort class, followed by a per-sort byte stream
+ *    (see {@link SortFieldProvider#readSortField(DataInput)})</li>
+ *   <li>Footer --&gt; {@link CodecUtil#writeFooter CodecFooter}</li>
+ * </ul>
+ * Field Descriptions:
+ * <ul>
+ *   <li>SegVersion is the code version that created the segment.</li>
+ *   <li>SegMinVersion is the minimum code version that contributed documents to the segment.</li>
+ *   <li>SegSize is the number of documents contained in the segment index.</li>
+ *   <li>IsCompoundFile records whether the segment is written as a compound file or
+ *       not. If this is -1, the segment is not a compound file. If it is 1, the segment
+ *       is a compound file.</li>
+ *   <li>The Diagnostics Map is privately written by {@link IndexWriter}, as a debugging aid,
+ *       for each segment it creates. It includes metadata like the current Lucene
+ *       version, OS, Java version, why the segment was created (merge, flush,
+ *       addIndexes), etc.</li>
+ *   <li>Files is a list of files referred to by this segment.</li>
+ * </ul>
+ *
+ * @see SegmentInfos
+ * @lucene.experimental
+ */
+public class Lucene86SegmentInfoFormat extends SegmentInfoFormat {
+
+  /** Sole constructor. */
+  public Lucene86SegmentInfoFormat() {
+  }
+
+  @Override
+  public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context) throws IOException {
+    final String fileName = IndexFileNames.segmentFileName(segment, "", SI_EXTENSION);
+    try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
+      Throwable priorE = null;
+      SegmentInfo si = null;
+      try {
+        int format = CodecUtil.checkIndexHeader(input, CODEC_NAME,
+            VERSION_START,
+            VERSION_CURRENT,
+            segmentID, "");
+        final Version version = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
+        byte hasMinVersion = input.readByte();
+        final Version minVersion;
+        switch (hasMinVersion) {
+          case 0:
+            minVersion = null;
+            break;
+          case 1:
+            minVersion = Version.fromBits(input.readInt(), input.readInt(), input.readInt());
+            break;
+          default:
+            throw new CorruptIndexException("Illegal boolean value " + hasMinVersion, input);
+        }
+
+        final int docCount = input.readInt();
+        if (docCount < 0) {
+          throw new CorruptIndexException("invalid docCount: " + docCount, input);
+        }
+        final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
+
+        final Map<String,String> diagnostics = input.readMapOfStrings();
+        final Set<String> files = input.readSetOfStrings();
+        final Map<String,String> attributes = input.readMapOfStrings();
+
+        int numSortFields = input.readVInt();
+        Sort indexSort;
+        if (numSortFields > 0) {
+          SortField[] sortFields = new SortField[numSortFields];
+          for(int i=0;i<numSortFields;i++) {
+            String name = input.readString();
+            sortFields[i] = SortFieldProvider.forName(name).readSortField(input);
+          }
+          indexSort = new Sort(sortFields);
+        } else if (numSortFields < 0) {
+          throw new CorruptIndexException("invalid index sort field count: " + numSortFields, input);
+        } else {
+          indexSort = null;
+        }
+
+        si = new SegmentInfo(dir, version, minVersion, segment, docCount, isCompoundFile, null, diagnostics, segmentID, attributes, indexSort);
+        si.setFiles(files);
+      } catch (Throwable exception) {
+        priorE = exception;
+      } finally {
+        CodecUtil.checkFooter(input, priorE);
+      }
+      return si;
+    }
+  }
+
+  @Override
+  public void write(Directory dir, SegmentInfo si, IOContext ioContext) throws IOException {
+    final String fileName = IndexFileNames.segmentFileName(si.name, "", SI_EXTENSION);
+
+    try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
+      // Only add the file once we've successfully created it, else IFD assert can trip:
+      si.addFile(fileName);
+      CodecUtil.writeIndexHeader(output,
+          CODEC_NAME,
+          VERSION_CURRENT,
+          si.getId(),
+          "");
+      Version version = si.getVersion();
+      if (version.major < 7) {
+        throw new IllegalArgumentException("invalid major version: should be >= 7 but got: " + version.major + " segment=" + si);
+      }
+      // Write the Lucene version that created this segment, since 3.1
+      output.writeInt(version.major);
+      output.writeInt(version.minor);
+      output.writeInt(version.bugfix);
+
+      // Write the min Lucene version that contributed docs to the segment, since 7.0
+      if (si.getMinVersion() != null) {
+        output.writeByte((byte) 1);
+        Version minVersion = si.getMinVersion();
+        output.writeInt(minVersion.major);
+        output.writeInt(minVersion.minor);
+        output.writeInt(minVersion.bugfix);
+      } else {
+        output.writeByte((byte) 0);
+      }
+
+      assert version.prerelease == 0;
+      output.writeInt(si.maxDoc());
+
+      output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
+      output.writeMapOfStrings(si.getDiagnostics());
+      Set<String> files = si.files();
+      for (String file : files) {
+        if (!IndexFileNames.parseSegmentName(file).equals(si.name)) {
+          throw new IllegalArgumentException("invalid files: expected segment=" + si.name + ", got=" + files);
+        }
+      }
+      output.writeSetOfStrings(files);
+      output.writeMapOfStrings(si.getAttributes());
+
+      Sort indexSort = si.getIndexSort();
+      int numSortFields = indexSort == null ? 0 : indexSort.getSort().length;
+      output.writeVInt(numSortFields);
+      for (int i = 0; i < numSortFields; ++i) {
+        SortField sortField = indexSort.getSort()[i];
+        IndexSorter sorter = sortField.getIndexSorter();
+        if (sorter == null) {
+          throw new IllegalArgumentException("cannot serialize SortField " + sortField);
+        }
+        output.writeString(sorter.getProviderName());
+        SortFieldProvider.write(sortField, output);
+      }
+
+      CodecUtil.writeFooter(output);
+    }
+  }
+
+  /** File extension used to store {@link SegmentInfo}. */
+  public final static String SI_EXTENSION = "si";
+  static final String CODEC_NAME = "Lucene86SegmentInfo";
+  static final int VERSION_START = 0;
+  static final int VERSION_CURRENT = VERSION_START;
+}
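
In practice the new format is exercised whenever an index sort is configured on the writer: each flushed or merged segment records the sort in its .si file as the (provider name, provider bytes) pairs described above. A minimal sketch, assuming documents carry a doc-values field named "timestamp"; document contents are omitted:

    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;

    void writeSortedIndex() throws java.io.IOException {
      try (Directory dir = new ByteBuffersDirectory()) {
        IndexWriterConfig iwc = new IndexWriterConfig();   // default codec, now Lucene86
        iwc.setIndexSort(new Sort(new SortField("timestamp", SortField.Type.LONG)));
        try (IndexWriter writer = new IndexWriter(dir, iwc)) {
          writer.addDocument(new Document());              // real documents would carry the "timestamp" doc-values field
          writer.commit();
        }
      }
    }
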
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/package-info.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/package-info.java
similarity index 95%
copy from lucene/core/src/java/org/apache/lucene/codecs/lucene84/package-info.java
copy to lucene/core/src/java/org/apache/lucene/codecs/lucene86/package-info.java
index 91ee2e2..ed6b96c 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene84/package-info.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene86/package-info.java
@@ -16,8 +16,8 @@
  */
 
 /**
- * Lucene 8.4 file format.
- * 
+ * Lucene 8.6 file format.
+ *
  * <h2>Apache Lucene - Index File Formats</h2>
  * <div>
  * <ul>
@@ -61,7 +61,7 @@
  * <li>A field is a named sequence of terms.</li>
  * <li>A term is a sequence of bytes.</li>
  * </ul>
- * <p>The same sequence of bytes in two different fields is considered a different 
+ * <p>The same sequence of bytes in two different fields is considered a different
  * term. Thus terms are represented as a pair: the string naming the field, and the
  * bytes within the field.</p>
  * <a id="Inverted_Indexing"></a>
@@ -128,36 +128,36 @@
  * <p>Each segment index maintains the following:</p>
  * <ul>
  * <li>
- * {@link org.apache.lucene.codecs.lucene70.Lucene70SegmentInfoFormat Segment info}.
+ * {@link org.apache.lucene.codecs.lucene86.Lucene86SegmentInfoFormat Segment info}.
  *    This contains metadata about a segment, such as the number of documents,
- *    what files it uses, 
+ *    what files it uses, and information about how the segment is sorted
  * </li>
  * <li>
- * {@link org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat Field names}. 
+ * {@link org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat Field names}.
  *    This contains the set of field names used in the index.
  * </li>
  * <li>
- * {@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Stored Field values}. 
- * This contains, for each document, a list of attribute-value pairs, where the attributes 
- * are field names. These are used to store auxiliary information about the document, such as 
- * its title, url, or an identifier to access a database. The set of stored fields are what is 
+ * {@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Stored Field values}.
+ * This contains, for each document, a list of attribute-value pairs, where the attributes
+ * are field names. These are used to store auxiliary information about the document, such as
+ * its title, url, or an identifier to access a database. The set of stored fields are what is
  * returned for each hit when searching. This is keyed by document number.
  * </li>
  * <li>
- * {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term dictionary}. 
+ * {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term dictionary}.
  * A dictionary containing all of the terms used in all of the
  * indexed fields of all of the documents. The dictionary also contains the number
  * of documents which contain the term, and pointers to the term's frequency and
  * proximity data.
  * </li>
  * <li>
- * {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Frequency data}. 
+ * {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Frequency data}.
  * For each term in the dictionary, the numbers of all the
  * documents that contain that term, and the frequency of the term in that
  * document, unless frequencies are omitted ({@link org.apache.lucene.index.IndexOptions#DOCS IndexOptions.DOCS})
  * </li>
  * <li>
- * {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Proximity data}. 
+ * {@link org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat Term Proximity data}.
  * For each term in the dictionary, the positions that the
  * term occurs in each document. Note that this will not exist if all fields in
  * all documents omit position data.
@@ -168,10 +168,10 @@
  * that is multiplied into the score for hits on that field.
  * </li>
  * <li>
- * {@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vectors}. 
+ * {@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vectors}.
  * For each field in each document, the term vector (sometimes
  * called document vector) may be stored. A term vector consists of term text and
- * term frequency. To add Term Vectors to your index see the 
+ * term frequency. To add Term Vectors to your index see the
  * {@link org.apache.lucene.document.Field Field} constructors
  * </li>
  * <li>
@@ -182,7 +182,7 @@
  * searches, per-document values are useful for things like scoring factors.
  * </li>
  * <li>
- * {@link org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat Live documents}. 
+ * {@link org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat Live documents}.
  * An optional file indicating which documents are live.
  * </li>
  * <li>
@@ -200,7 +200,7 @@
  * <p>All files belonging to a segment have the same name with varying extensions.
  * The extensions correspond to the different file formats described below. When
  * using the Compound File format (default for small segments) these files (except
- * for the Segment info file, the Lock file, and Deleted documents file) are collapsed 
+ * for the Segment info file, the Lock file, and Deleted documents file) are collapsed
  * into a single .cfs file (see below for details)</p>
  * <p>Typically, all segments in an index are stored in a single directory,
  * although this is not required.</p>
@@ -234,7 +234,7 @@
  * file.</td>
  * </tr>
  * <tr>
- * <td>{@link org.apache.lucene.codecs.lucene70.Lucene70SegmentInfoFormat Segment Info}</td>
+ * <td>{@link org.apache.lucene.codecs.lucene86.Lucene86SegmentInfoFormat Segment Info}</td>
  * <td>.si</td>
  * <td>Stores metadata about a segment</td>
  * </tr>
@@ -351,12 +351,12 @@
  * <a href="http://issues.apache.org/jira/browse/LUCENE-1654">LUCENE-1654</a> for details.</li>
  * <li>In version 3.0, compressed fields are no longer written to the index (they
  * can still be read, but on merge the new segment will write them, uncompressed).
- * See issue <a href="http://issues.apache.org/jira/browse/LUCENE-1960">LUCENE-1960</a> 
+ * See issue <a href="http://issues.apache.org/jira/browse/LUCENE-1960">LUCENE-1960</a>
  * for details.</li>
 * <li>In version 3.1, segments record the code version that created them. See
- * <a href="http://issues.apache.org/jira/browse/LUCENE-2720">LUCENE-2720</a> for details. 
- * Additionally segments track explicitly whether or not they have term vectors. 
- * See <a href="http://issues.apache.org/jira/browse/LUCENE-2811">LUCENE-2811</a> 
+ * <a href="http://issues.apache.org/jira/browse/LUCENE-2720">LUCENE-2720</a> for details.
+ * Additionally segments track explicitly whether or not they have term vectors.
+ * See <a href="http://issues.apache.org/jira/browse/LUCENE-2811">LUCENE-2811</a>
  * for details.</li>
 * <li>In version 3.2, numeric fields are written natively to the stored fields
 * file; previously they were stored in text format only.</li>
@@ -364,22 +364,22 @@
  * frequencies.</li>
  * <li>In version 4.0, the format of the inverted index became extensible via
  * the {@link org.apache.lucene.codecs.Codec Codec} api. Fast per-document storage
- * ({@code DocValues}) was introduced. Normalization factors need no longer be a 
+ * ({@code DocValues}) was introduced. Normalization factors need no longer be a
  * single byte, they can be any {@link org.apache.lucene.index.NumericDocValues NumericDocValues}.
- * Terms need not be unicode strings, they can be any byte sequence. Term offsets 
- * can optionally be indexed into the postings lists. Payloads can be stored in the 
+ * Terms need not be unicode strings, they can be any byte sequence. Term offsets
+ * can optionally be indexed into the postings lists. Payloads can be stored in the
  * term vectors.</li>
  * <li>In version 4.1, the format of the postings list changed to use either
 * FOR compression or variable-byte encoding, depending upon the frequency
  * of the term. Terms appearing only once were changed to inline directly into
  * the term dictionary. Stored fields are compressed by default. </li>
- * <li>In version 4.2, term vectors are compressed by default. DocValues has 
+ * <li>In version 4.2, term vectors are compressed by default. DocValues has
  * a new multi-valued type (SortedSet), that can be used for faceting/grouping/joining
  * on multi-valued fields.</li>
  * <li>In version 4.5, DocValues were extended to explicitly represent missing values.</li>
- * <li>In version 4.6, FieldInfos were extended to support per-field DocValues generation, to 
+ * <li>In version 4.6, FieldInfos were extended to support per-field DocValues generation, to
  * allow updating NumericDocValues fields.</li>
- * <li>In version 4.8, checksum footers were added to the end of each index file 
+ * <li>In version 4.8, checksum footers were added to the end of each index file
  * for improved data integrity. Specifically, the last 8 bytes of every index file
  * contain the zlib-crc32 checksum of the file.</li>
  * <li>In version 4.9, DocValues has a new multi-valued numeric type (SortedNumeric)
@@ -399,6 +399,8 @@
  * instead of O(n), where n is the number of elements to skip when advancing in the data.</li>
 * <li>In version 8.4, postings, positions, offsets and payload lengths have moved to a more
  * performant encoding that is vectorized.</li>
+ * <li>In version 8.6, index sort serialization is delegated to the sorts themselves, to
+ * allow user-defined sorts to be used.</li>
  * </ul>
  * <a id="Limitations"></a>
  * <h3>Limitations</h3>
@@ -411,4 +413,4 @@
  * better yet, {@link org.apache.lucene.store.DataOutput#writeVInt VInt} values which have no limit.</p>
  * </div>
  */
-package org.apache.lucene.codecs.lucene84;
+package org.apache.lucene.codecs.lucene86;
diff --git a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java
index 1aeab4c..e213a48 100644
--- a/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/BinaryDocValuesWriter.java
@@ -21,7 +21,6 @@ import java.io.IOException;
 
 import org.apache.lucene.codecs.DocValuesConsumer;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.SortField;
 import org.apache.lucene.store.DataInput;
 import org.apache.lucene.store.DataOutput;
 import org.apache.lucene.util.ArrayUtil;
@@ -37,7 +36,7 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
 
 /** Buffers up pending byte[] per doc, then flushes when
  *  segment flushes. */
-class BinaryDocValuesWriter extends DocValuesWriter {
+class BinaryDocValuesWriter extends DocValuesWriter<BinaryDocValues> {
 
   /** Maximum length for a binary field. */
   private static final int MAX_LENGTH = ArrayUtil.MAX_ARRAY_LENGTH;
@@ -56,6 +55,8 @@ class BinaryDocValuesWriter extends DocValuesWriter {
   private int lastDocID = -1;
   private int maxLength = 0;
 
+  private PackedLongValues finalLengths;
+
   public BinaryDocValuesWriter(FieldInfo fieldInfo, Counter iwBytesUsed) {
     this.fieldInfo = fieldInfo;
     this.bytes = new PagedBytes(BLOCK_BITS);
@@ -98,10 +99,6 @@ class BinaryDocValuesWriter extends DocValuesWriter {
     bytesUsed = newBytesUsed;
   }
 
-  @Override
-  public void finish(int maxDoc) {
-  }
-
   private SortingLeafReader.CachedBinaryDVs sortDocValues(int maxDoc, Sorter.DocMap sortMap, BinaryDocValues oldValues) throws IOException {
     FixedBitSet docsWithField = new FixedBitSet(maxDoc);
     BytesRef[] values = new BytesRef[maxDoc];
@@ -118,18 +115,23 @@ class BinaryDocValuesWriter extends DocValuesWriter {
   }
 
   @Override
-  Sorter.DocComparator getDocComparator(int numDoc, SortField sortField) throws IOException {
-    throw new IllegalArgumentException("It is forbidden to sort on a binary field");
+  BinaryDocValues getDocValues() {
+    if (finalLengths == null) {
+      finalLengths = this.lengths.build();
+    }
+    return new BufferedBinaryDocValues(finalLengths, maxLength, bytes.getDataInput(), docsWithField.iterator());
   }
 
   @Override
   public void flush(SegmentWriteState state, Sorter.DocMap sortMap, DocValuesConsumer dvConsumer) throws IOException {
     bytes.freeze(false);
-    final PackedLongValues lengths = this.lengths.build();
+    if (finalLengths == null) {
+      finalLengths = this.lengths.build();
+    }
     final SortingLeafReader.CachedBinaryDVs sorted;
     if (sortMap != null) {
       sorted = sortDocValues(state.segmentInfo.maxDoc(), sortMap,
-          new BufferedBinaryDocValues(lengths, maxLength, bytes.getDataInput(), docsWithField.iterator()));
+          new BufferedBinaryDocValues(finalLengths, maxLength, bytes.getDataInput(), docsWithField.iterator()));
     } else {
       sorted = null;
     }
@@ -141,7 +143,7 @@ class BinaryDocValuesWriter extends DocValuesWriter {
                                     throw new IllegalArgumentException("wrong fieldInfo");
                                   }
                                   if (sorted == null) {
-                                    return new BufferedBinaryDocValues(lengths, maxLength, bytes.getDataInput(), docsWithField.iterator());
+                                    return new BufferedBinaryDocValues(finalLengths, maxLength, bytes.getDataInput(), docsWithField.iterator());
                                   } else {
                                     return new SortingLeafReader.SortingBinaryDocValues(sorted);
                                   }
@@ -200,9 +202,4 @@ class BinaryDocValuesWriter extends DocValuesWriter {
       return value.get();
     }
   }
-
-  @Override
-  DocIdSetIterator getDocIdSet() {
-    return docsWithField.iterator();
-  }
 }
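
A note on the new finalLengths field above: PackedLongValues.Builder#build is destructive and can only be called once, and with this change getDocValues() may be asked for the buffered values (for index sorting) before flush() runs, so both paths build the packed lengths lazily and share the cached result. NumericDocValuesWriter below follows the same pattern with finalValues.
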
diff --git a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
index 03cabc1..cdd6ebe 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DefaultIndexingChain.java
@@ -22,10 +22,9 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
+import java.util.Objects;
 
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.codecs.DocValuesConsumer;
@@ -39,8 +38,6 @@ import org.apache.lucene.document.FieldType;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.SortedNumericSortField;
-import org.apache.lucene.search.SortedSetSortField;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.ArrayUtil;
@@ -75,8 +72,6 @@ final class DefaultIndexingChain extends DocConsumer {
   // Holds fields seen in each document
   private PerField[] fields = new PerField[1];
 
-  private final Set<String> finishedDocValues = new HashSet<>();
-
   public DefaultIndexingChain(DocumentsWriterPerThread docWriter) {
     this.docWriter = docWriter;
     this.fieldInfos = docWriter.getFieldInfosBuilder();
@@ -94,29 +89,96 @@ final class DefaultIndexingChain extends DocConsumer {
     termsHash = new FreqProxTermsWriter(docWriter, termVectorsWriter);
   }
 
+  private LeafReader getDocValuesLeafReader() {
+    return new DocValuesLeafReader() {
+      @Override
+      public NumericDocValues getNumericDocValues(String field) throws IOException {
+        PerField pf = getPerField(field);
+        if (pf == null) {
+          return null;
+        }
+        if (pf.fieldInfo.getDocValuesType() == DocValuesType.NUMERIC) {
+          return (NumericDocValues) pf.docValuesWriter.getDocValues();
+        }
+        return null;
+      }
+
+      @Override
+      public BinaryDocValues getBinaryDocValues(String field) throws IOException {
+        PerField pf = getPerField(field);
+        if (pf == null) {
+          return null;
+        }
+        if (pf.fieldInfo.getDocValuesType() == DocValuesType.BINARY) {
+          return (BinaryDocValues) pf.docValuesWriter.getDocValues();
+        }
+        return null;
+      }
+
+      @Override
+      public SortedDocValues getSortedDocValues(String field) throws IOException {
+        PerField pf = getPerField(field);
+        if (pf == null) {
+          return null;
+        }
+        if (pf.fieldInfo.getDocValuesType() == DocValuesType.SORTED) {
+          return (SortedDocValues) pf.docValuesWriter.getDocValues();
+        }
+        return null;
+      }
+
+      @Override
+      public SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException {
+        PerField pf = getPerField(field);
+        if (pf == null) {
+          return null;
+        }
+        if (pf.fieldInfo.getDocValuesType() == DocValuesType.SORTED_NUMERIC) {
+          return (SortedNumericDocValues) pf.docValuesWriter.getDocValues();
+        }
+        return null;
+      }
+
+      @Override
+      public SortedSetDocValues getSortedSetDocValues(String field) throws IOException {
+        PerField pf = getPerField(field);
+        if (pf == null) {
+          return null;
+        }
+        if (pf.fieldInfo.getDocValuesType() == DocValuesType.SORTED_SET) {
+          return (SortedSetDocValues) pf.docValuesWriter.getDocValues();
+        }
+        return null;
+      }
+
+      @Override
+      public FieldInfos getFieldInfos() {
+        return fieldInfos.finish();
+      }
+
+    };
+  }
+
   private Sorter.DocMap maybeSortSegment(SegmentWriteState state) throws IOException {
     Sort indexSort = state.segmentInfo.getIndexSort();
     if (indexSort == null) {
       return null;
     }
 
-    List<Sorter.DocComparator> comparators = new ArrayList<>();
+    LeafReader docValuesReader = getDocValuesLeafReader();
+
+    List<IndexSorter.DocComparator> comparators = new ArrayList<>();
     for (int i = 0; i < indexSort.getSort().length; i++) {
       SortField sortField = indexSort.getSort()[i];
-      PerField perField = getPerField(sortField.getField());
-      if (perField != null && perField.docValuesWriter != null &&
-          finishedDocValues.contains(perField.fieldInfo.name) == false) {
-          perField.docValuesWriter.finish(state.segmentInfo.maxDoc());
-          Sorter.DocComparator cmp = perField.docValuesWriter.getDocComparator(state.segmentInfo.maxDoc(), sortField);
-          comparators.add(cmp);
-          finishedDocValues.add(perField.fieldInfo.name);
-      } else {
-        // safe to ignore, sort field with no values or already seen before
+      IndexSorter sorter = sortField.getIndexSorter();
+      if (sorter == null) {
+        throw new UnsupportedOperationException("Cannot sort index using sort field " + sortField);
       }
+      comparators.add(sorter.getDocComparator(docValuesReader, state.segmentInfo.maxDoc()));
     }
     Sorter sorter = new Sorter(indexSort);
     // returns null if the documents are already sorted
-    return sorter.sort(state.segmentInfo.maxDoc(), comparators.toArray(new Sorter.DocComparator[comparators.size()]));
+    return sorter.sort(state.segmentInfo.maxDoc(), comparators.toArray(IndexSorter.DocComparator[]::new));
   }
 
   @Override
@@ -255,10 +317,6 @@ final class DefaultIndexingChain extends DocConsumer {
               DocValuesFormat fmt = state.segmentInfo.getCodec().docValuesFormat();
               dvConsumer = fmt.fieldsConsumer(state);
             }
-
-            if (finishedDocValues.contains(perField.fieldInfo.name) == false) {
-              perField.docValuesWriter.finish(maxDoc);
-            }
             perField.docValuesWriter.flush(state, sortMap, dvConsumer);
             perField.docValuesWriter = null;
           } else if (perField.fieldInfo.getDocValuesType() != DocValuesType.NONE) {
@@ -527,45 +585,58 @@ final class DefaultIndexingChain extends DocConsumer {
     fp.pointValuesWriter.addPackedValue(docState.docID, field.binaryValue());
   }
 
-  private void validateIndexSortDVType(Sort indexSort, String fieldName, DocValuesType dvType) {
+  private void validateIndexSortDVType(Sort indexSort, String fieldToValidate, DocValuesType dvType) throws IOException {
     for (SortField sortField : indexSort.getSort()) {
-      if (sortField.getField().equals(fieldName)) {
-        switch (dvType) {
-          case NUMERIC:
-            if (sortField.getType().equals(SortField.Type.INT) == false &&
-                  sortField.getType().equals(SortField.Type.LONG) == false &&
-                  sortField.getType().equals(SortField.Type.FLOAT) == false &&
-                  sortField.getType().equals(SortField.Type.DOUBLE) == false) {
-              throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
-            }
-            break;
+      IndexSorter sorter = sortField.getIndexSorter();
+      if (sorter == null) {
+        throw new IllegalStateException("Cannot sort index with sort order " + sortField);
+      }
+      sorter.getDocComparator(new DocValuesLeafReader() {
+        @Override
+        public NumericDocValues getNumericDocValues(String field) {
+          if (Objects.equals(field, fieldToValidate) && dvType != DocValuesType.NUMERIC) {
+            throw new IllegalArgumentException("SortField " + sortField + " expected field [" + field + "] to be NUMERIC but it is [" + dvType + "]");
+          }
+          return DocValues.emptyNumeric();
+        }
 
-          case BINARY:
-            throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
+        @Override
+        public BinaryDocValues getBinaryDocValues(String field) {
+          if (Objects.equals(field, fieldToValidate) && dvType != DocValuesType.BINARY) {
+            throw new IllegalArgumentException("SortField " + sortField + " expected field [" + field + "] to be BINARY but it is [" + dvType + "]");
+          }
+          return DocValues.emptyBinary();
+        }
 
-          case SORTED:
-            if (sortField.getType().equals(SortField.Type.STRING) == false) {
-              throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
-            }
-            break;
+        @Override
+        public SortedDocValues getSortedDocValues(String field) {
+          if (Objects.equals(field, fieldToValidate) && dvType != DocValuesType.SORTED) {
+            throw new IllegalArgumentException("SortField " + sortField + " expected field [" + field + "] to be SORTED but it is [" + dvType + "]");
+          }
+          return DocValues.emptySorted();
+        }
 
-          case SORTED_NUMERIC:
-            if (sortField instanceof SortedNumericSortField == false) {
-              throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
-            }
-            break;
+        @Override
+        public SortedNumericDocValues getSortedNumericDocValues(String field) {
+          if (Objects.equals(field, fieldToValidate) && dvType != DocValuesType.SORTED_NUMERIC) {
+            throw new IllegalArgumentException("SortField " + sortField + " expected field [" + field + "] to be SORTED_NUMERIC but it is [" + dvType + "]");
+          }
+          return DocValues.emptySortedNumeric();
+        }
 
-          case SORTED_SET:
-            if (sortField instanceof SortedSetSortField == false) {
-              throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
-            }
-            break;
+        @Override
+        public SortedSetDocValues getSortedSetDocValues(String field) {
+          if (Objects.equals(field, fieldToValidate) && dvType != DocValuesType.SORTED_SET) {
+            throw new IllegalArgumentException("SortField " + sortField + " expected field [" + field + "] to be SORTED_SET but it is [" + dvType + "]");
+          }
+          return DocValues.emptySortedSet();
+        }
 
-          default:
-            throw new IllegalArgumentException("invalid doc value type:" + dvType + " for sortField:" + sortField);
+        @Override
+        public FieldInfos getFieldInfos() {
+          throw new UnsupportedOperationException();
         }
-        break;
-      }
+      }, 0);
     }
   }
 
@@ -581,8 +652,8 @@ final class DefaultIndexingChain extends DocConsumer {
         validateIndexSortDVType(indexSort, fp.fieldInfo.name, dvType);
       }
       fieldInfos.globalFieldNumbers.setDocValuesType(fp.fieldInfo.number, fp.fieldInfo.name, dvType);
-
     }
+
     fp.fieldInfo.setDocValuesType(dvType);
 
     int docID = docState.docID;
@@ -713,7 +784,7 @@ final class DefaultIndexingChain extends DocConsumer {
 
     // Non-null if this field ever had doc values in this
     // segment:
-    DocValuesWriter docValuesWriter;
+    DocValuesWriter<?> docValuesWriter;
 
     // Non-null if this field ever had points in this segment:
     PointValuesWriter pointValuesWriter;
@@ -907,7 +978,7 @@ final class DefaultIndexingChain extends DocConsumer {
           return null;
         }
 
-        return perField.docValuesWriter.getDocIdSet();
+        return perField.docValuesWriter.getDocValues();
       }
     }
     return null;
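
The net effect of the validation rewrite above, sketched for illustration (not from this commit; the field name, Directory and Analyzer are assumptions): with an index sort on a NUMERIC sort field, giving that field a mismatched doc values type fails as soon as the sorter's comparator is exercised against the anonymous DocValuesLeafReader in validateIndexSortDVType.

    static void demoSortTypeMismatch(Directory dir, Analyzer analyzer) throws IOException {
      IndexWriterConfig cfg = new IndexWriterConfig(analyzer);
      cfg.setIndexSort(new Sort(new SortField("price", SortField.Type.LONG)));
      try (IndexWriter writer = new IndexWriter(dir, cfg)) {
        Document doc = new Document();
        doc.add(new SortedDocValuesField("price", new BytesRef("10"))); // SORTED, but the sort expects NUMERIC
        writer.addDocument(doc); // expected: IllegalArgumentException naming the expected doc values type
      }
    }
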
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocValuesLeafReader.java b/lucene/core/src/java/org/apache/lucene/index/DocValuesLeafReader.java
new file mode 100644
index 0000000..93b7f49
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/DocValuesLeafReader.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.index;
+
+import java.io.IOException;
+
+import org.apache.lucene.util.Bits;
+
+abstract class DocValuesLeafReader extends LeafReader {
+  @Override
+  public final CacheHelper getCoreCacheHelper() {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public final Terms terms(String field) throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public final NumericDocValues getNormValues(String field) throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public final Bits getLiveDocs() {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public final PointValues getPointValues(String field) throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public final void checkIntegrity() throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public final LeafMetaData getMetaData() {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public final Fields getTermVectors(int docID) throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public final int numDocs() {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public final int maxDoc() {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public final void document(int docID, StoredFieldVisitor visitor) throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  protected final void doClose() throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public final CacheHelper getReaderCacheHelper() {
+    throw new UnsupportedOperationException();
+  }
+}
diff --git a/lucene/core/src/java/org/apache/lucene/index/DocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/DocValuesWriter.java
index b739b14..4098cb0 100644
--- a/lucene/core/src/java/org/apache/lucene/index/DocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/DocValuesWriter.java
@@ -21,12 +21,8 @@ import java.io.IOException;
 
 import org.apache.lucene.codecs.DocValuesConsumer;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.SortField;
 
-abstract class DocValuesWriter {
-  abstract void finish(int numDoc);
+abstract class DocValuesWriter<T extends DocIdSetIterator> {
   abstract void flush(SegmentWriteState state, Sorter.DocMap sortMap, DocValuesConsumer consumer) throws IOException;
-  abstract Sorter.DocComparator getDocComparator(int numDoc, SortField sortField) throws IOException;
-  abstract DocIdSetIterator getDocIdSet();
-
+  abstract T getDocValues();
 }
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexSorter.java b/lucene/core/src/java/org/apache/lucene/index/IndexSorter.java
new file mode 100644
index 0000000..81fdf62
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexSorter.java
@@ -0,0 +1,448 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.index;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+
+import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.util.LongValues;
+import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.packed.PackedInts;
+
+import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
+
+/**
+ * Handles how documents should be sorted in an index, both within a segment and between
+ * segments.
+ *
+ * Implementers must provide the following methods:
+ * {@link #getDocComparator(LeafReader,int)} - an object that determines how documents within a segment are to be sorted
+ * {@link #getComparableProviders(List)} - an array of objects that return a sortable long value per document and segment
+ * {@link #getProviderName()} - the SPI-registered name of a {@link SortFieldProvider} to serialize the sort
+ *
+ * The companion {@link SortFieldProvider} should be registered with SPI via {@code META-INF/services}
+ */
+public interface IndexSorter {
+
+  /** Used for sorting documents across segments */
+  interface ComparableProvider {
+    /**
+     * Returns a long so that the natural ordering of long values matches the
+     * ordering of doc IDs for the given comparator
+     */
+    long getAsComparableLong(int docID) throws IOException;
+  }
+
+  /** A comparator of doc IDs, used for sorting documents within a segment */
+  interface DocComparator {
+    /** Compare docID1 against docID2. The contract for the return value is the
+     *  same as {@link Comparator#compare(Object, Object)}. */
+    int compare(int docID1, int docID2);
+  }
+
+  /**
+   * Get an array of {@link ComparableProvider}, one per segment, for merge sorting documents in different segments
+   * @param readers the readers to be merged
+   */
+  ComparableProvider[] getComparableProviders(List<? extends LeafReader> readers) throws IOException;
+
+  /**
+   * Get a comparator that determines the sort order of docs within a single Reader.
+   *
+   * NB We cannot simply use the {@link FieldComparator} API because it requires docIDs to be sent
+   * in-order. The default implementations allocate array[maxDoc] to hold native values for comparison,
+   * but 1) they are transient (only alive while sorting this one segment) and 2) in the typical
+   * index sorting case, they are only used to sort newly flushed segments, which will be smaller
+   * than merged segments
+   *
+   * @param reader the Reader to sort
+   * @param maxDoc the number of documents in the Reader
+   */
+  DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException;
+
+  /**
+   * The SPI-registered name of a {@link SortFieldProvider} that will deserialize the parent SortField
+   */
+  String getProviderName();
+
+  /**
+   * Provide a NumericDocValues instance for a LeafReader
+   */
+  interface NumericDocValuesProvider {
+    /**
+     * Returns the NumericDocValues instance for this LeafReader
+     */
+    NumericDocValues get(LeafReader reader) throws IOException;
+  }
+
+  /**
+   * Provide a SortedDocValues instance for a LeafReader
+   */
+  interface SortedDocValuesProvider {
+    /**
+     * Returns the SortedDocValues instance for this LeafReader
+     */
+    SortedDocValues get(LeafReader reader) throws IOException;
+  }
+
+  /**
+   * Sorts documents based on integer values from a NumericDocValues instance
+   */
+  final class IntSorter implements IndexSorter {
+
+    private final Integer missingValue;
+    private final int reverseMul;
+    private final NumericDocValuesProvider valuesProvider;
+    private final String providerName;
+
+    /**
+     * Creates a new IntSorter
+     */
+    public IntSorter(String providerName, Integer missingValue, boolean reverse, NumericDocValuesProvider valuesProvider) {
+      this.missingValue = missingValue;
+      this.reverseMul = reverse ? -1 : 1;
+      this.valuesProvider = valuesProvider;
+      this.providerName = providerName;
+    }
+
+    @Override
+    public ComparableProvider[] getComparableProviders(List<? extends LeafReader> readers) throws IOException {
+      ComparableProvider[] providers = new ComparableProvider[readers.size()];
+      final long missingValue;
+      if (this.missingValue != null) {
+        missingValue = this.missingValue;
+      } else {
+        missingValue = 0L;
+      }
+
+      for(int readerIndex=0;readerIndex<readers.size();readerIndex++) {
+        final NumericDocValues values = valuesProvider.get(readers.get(readerIndex));
+
+        providers[readerIndex] = docID -> {
+          if (values.advanceExact(docID)) {
+            return values.longValue();
+          } else {
+            return missingValue;
+          }
+        };
+      }
+      return providers;
+    }
+
+    @Override
+    public DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException {
+      final NumericDocValues dvs = valuesProvider.get(reader);
+      int[] values = new int[maxDoc];
+      if (this.missingValue != null) {
+        Arrays.fill(values, this.missingValue);
+      }
+      while (true) {
+        int docID = dvs.nextDoc();
+        if (docID == NO_MORE_DOCS) {
+          break;
+        }
+        values[docID] = (int) dvs.longValue();
+      }
+
+      return (docID1, docID2) -> reverseMul * Integer.compare(values[docID1], values[docID2]);
+    }
+
+    @Override
+    public String getProviderName() {
+      return providerName;
+    }
+  }
+
+  /**
+   * Sorts documents based on long values from a NumericDocValues instance
+   */
+  final class LongSorter implements IndexSorter {
+
+    private final String providerName;
+    private final Long missingValue;
+    private final int reverseMul;
+    private final NumericDocValuesProvider valuesProvider;
+
+    /** Creates a new LongSorter */
+    public LongSorter(String providerName, Long missingValue, boolean reverse, NumericDocValuesProvider valuesProvider) {
+      this.providerName = providerName;
+      this.missingValue = missingValue;
+      this.reverseMul = reverse ? -1 : 1;
+      this.valuesProvider = valuesProvider;
+    }
+
+    @Override
+    public ComparableProvider[] getComparableProviders(List<? extends LeafReader> readers) throws IOException {
+      ComparableProvider[] providers = new ComparableProvider[readers.size()];
+      final long missingValue;
+      if (this.missingValue != null) {
+        missingValue = this.missingValue;
+      } else {
+        missingValue = 0L;
+      }
+
+      for(int readerIndex=0;readerIndex<readers.size();readerIndex++) {
+        final NumericDocValues values = valuesProvider.get(readers.get(readerIndex));
+
+        providers[readerIndex] = docID -> {
+          if (values.advanceExact(docID)) {
+            return values.longValue();
+          } else {
+            return missingValue;
+          }
+        };
+      }
+      return providers;
+    }
+
+    @Override
+    public DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException {
+      final NumericDocValues dvs = valuesProvider.get(reader);
+      long[] values = new long[maxDoc];
+      if (this.missingValue != null) {
+        Arrays.fill(values, this.missingValue);
+      }
+      while (true) {
+        int docID = dvs.nextDoc();
+        if (docID == NO_MORE_DOCS) {
+          break;
+        }
+        values[docID] = dvs.longValue();
+      }
+
+      return (docID1, docID2) -> reverseMul * Long.compare(values[docID1], values[docID2]);
+    }
+
+    @Override
+    public String getProviderName() {
+      return providerName;
+    }
+  }
+
+  /**
+   * Sorts documents based on float values from a NumericDocValues instance
+   */
+  final class FloatSorter implements IndexSorter {
+
+    private final String providerName;
+    private final Float missingValue;
+    private final int reverseMul;
+    private final NumericDocValuesProvider valuesProvider;
+
+    /** Creates a new FloatSorter */
+    public FloatSorter(String providerName, Float missingValue, boolean reverse, NumericDocValuesProvider valuesProvider) {
+      this.providerName = providerName;
+      this.missingValue = missingValue;
+      this.reverseMul = reverse ? -1 : 1;
+      this.valuesProvider = valuesProvider;
+    }
+
+    @Override
+    public ComparableProvider[] getComparableProviders(List<? extends LeafReader> readers) throws IOException {
+      ComparableProvider[] providers = new ComparableProvider[readers.size()];
+      final float missingValue;
+      if (this.missingValue != null) {
+        missingValue = this.missingValue;
+      } else {
+        missingValue = 0.0f;
+      }
+
+      for(int readerIndex=0;readerIndex<readers.size();readerIndex++) {
+        final NumericDocValues values = valuesProvider.get(readers.get(readerIndex));
+
+        providers[readerIndex] = docID -> {
+          float value = missingValue;
+          if (values.advanceExact(docID)) {
+            value = Float.intBitsToFloat((int) values.longValue());
+          }
+          return NumericUtils.floatToSortableInt(value);
+        };
+      }
+      return providers;
+    }
+
+    @Override
+    public DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException {
+      final NumericDocValues dvs = valuesProvider.get(reader);
+      float[] values = new float[maxDoc];
+      if (this.missingValue != null) {
+        Arrays.fill(values, this.missingValue);
+      }
+      while (true) {
+        int docID = dvs.nextDoc();
+        if (docID == NO_MORE_DOCS) {
+          break;
+        }
+        values[docID] = Float.intBitsToFloat((int) dvs.longValue());
+      }
+
+      return (docID1, docID2) -> reverseMul * Float.compare(values[docID1], values[docID2]);
+    }
+
+    @Override
+    public String getProviderName() {
+      return providerName;
+    }
+  }
+
+  /**
+   * Sorts documents based on double values from a NumericDocValues instance
+   */
+  final class DoubleSorter implements IndexSorter {
+
+    private final String providerName;
+    private final Double missingValue;
+    private final int reverseMul;
+    private final NumericDocValuesProvider valuesProvider;
+
+    /** Creates a new DoubleSorter */
+    public DoubleSorter(String providerName, Double missingValue, boolean reverse, NumericDocValuesProvider valuesProvider) {
+      this.providerName = providerName;
+      this.missingValue = missingValue;
+      this.reverseMul = reverse ? -1 : 1;
+      this.valuesProvider = valuesProvider;
+    }
+
+    @Override
+    public ComparableProvider[] getComparableProviders(List<? extends LeafReader> readers) throws IOException {
+      ComparableProvider[] providers = new ComparableProvider[readers.size()];
+      final double missingValue;
+      if (this.missingValue != null) {
+        missingValue = this.missingValue;
+      } else {
+        missingValue = 0.0;
+      }
+
+      for(int readerIndex=0;readerIndex<readers.size();readerIndex++) {
+        final NumericDocValues values = valuesProvider.get(readers.get(readerIndex));
+
+        providers[readerIndex] = docID -> {
+          double value = missingValue;
+          if (values.advanceExact(docID)) {
+            value = Double.longBitsToDouble(values.longValue());
+          }
+          return NumericUtils.doubleToSortableLong(value);
+        };
+      }
+      return providers;
+    }
+
+    @Override
+    public DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException {
+      final NumericDocValues dvs = valuesProvider.get(reader);
+      double[] values = new double[maxDoc];
+      if (missingValue != null) {
+        Arrays.fill(values, missingValue);
+      }
+      while (true) {
+        int docID = dvs.nextDoc();
+        if (docID == NO_MORE_DOCS) {
+          break;
+        }
+        values[docID] = Double.longBitsToDouble(dvs.longValue());
+      }
+
+      return (docID1, docID2) -> reverseMul * Double.compare(values[docID1], values[docID2]);
+    }
+
+    @Override
+    public String getProviderName() {
+      return providerName;
+    }
+  }
+
+  /**
+   * Sorts documents based on terms from a SortedDocValues instance
+   */
+  final class StringSorter implements IndexSorter {
+
+    private final String providerName;
+    private final Object missingValue;
+    private final int reverseMul;
+    private final SortedDocValuesProvider valuesProvider;
+
+    /** Creates a new StringSorter */
+    public StringSorter(String providerName, Object missingValue, boolean reverse, SortedDocValuesProvider valuesProvider) {
+      this.providerName = providerName;
+      this.missingValue = missingValue;
+      this.reverseMul = reverse ? -1 : 1;
+      this.valuesProvider = valuesProvider;
+    }
+
+    @Override
+    public ComparableProvider[] getComparableProviders(List<? extends LeafReader> readers) throws IOException {
+      final ComparableProvider[] providers = new ComparableProvider[readers.size()];
+      final SortedDocValues[] values = new SortedDocValues[readers.size()];
+      for(int i=0;i<readers.size();i++) {
+        final SortedDocValues sorted = valuesProvider.get(readers.get(i));
+        values[i] = sorted;
+      }
+      OrdinalMap ordinalMap = OrdinalMap.build(null, values, PackedInts.DEFAULT);
+      final int missingOrd;
+      if (missingValue == SortField.STRING_LAST) {
+        missingOrd = Integer.MAX_VALUE;
+      } else {
+        missingOrd = Integer.MIN_VALUE;
+      }
+
+      for(int readerIndex=0;readerIndex<readers.size();readerIndex++) {
+        final SortedDocValues readerValues = values[readerIndex];
+        final LongValues globalOrds = ordinalMap.getGlobalOrds(readerIndex);
+        providers[readerIndex] = docID -> {
+          if (readerValues.advanceExact(docID)) {
+            // translate segment's ord to global ord space:
+            return globalOrds.get(readerValues.ordValue());
+          } else {
+            return missingOrd;
+          }
+        };
+      }
+      return providers;
+    }
+
+    @Override
+    public DocComparator getDocComparator(LeafReader reader, int maxDoc) throws IOException {
+      final SortedDocValues sorted = valuesProvider.get(reader);
+      final int missingOrd;
+      if (missingValue == SortField.STRING_LAST) {
+        missingOrd = Integer.MAX_VALUE;
+      } else {
+        missingOrd = Integer.MIN_VALUE;
+      }
+
+      final int[] ords = new int[maxDoc];
+      Arrays.fill(ords, missingOrd);
+      int docID;
+      while ((docID = sorted.nextDoc()) != NO_MORE_DOCS) {
+        ords[docID] = sorted.ordValue();
+      }
+
+      return (docID1, docID2) -> reverseMul * Integer.compare(ords[docID1], ords[docID2]);
+    }
+
+    @Override
+    public String getProviderName() {
+      return providerName;
+    }
+  }
+
+}
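
As a rough illustration of how these sorters are meant to be obtained (not part of this commit; the provider name and field are assumptions), a SortField implementation that sorts on an int doc values field could hand back an IntSorter wired to a NumericDocValuesProvider:

    IndexSorter sorterFor(String field, Integer missingValue, boolean reverse) {
      return new IndexSorter.IntSorter(
          "MyIntSortField",                                 // must match an SPI-registered SortFieldProvider
          missingValue,
          reverse,
          reader -> DocValues.getNumeric(reader, field));   // NumericDocValuesProvider
    }
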
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
index 4cdc9c0..26e7e3d 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriterConfig.java
@@ -19,7 +19,6 @@ package org.apache.lucene.index;
 
 import java.io.PrintStream;
 import java.util.Arrays;
-import java.util.EnumSet;
 import java.util.stream.Collectors;
 
 import org.apache.lucene.analysis.Analyzer;
@@ -32,9 +31,9 @@ import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.util.InfoStream;
 import org.apache.lucene.util.PrintStreamInfoStream;
+import org.apache.lucene.util.SetOnce;
 import org.apache.lucene.util.SetOnce.AlreadySetException;
 import org.apache.lucene.util.Version;
-import org.apache.lucene.util.SetOnce;
 
 /**
  * Holds all the configuration that is used to create an {@link IndexWriter}.
@@ -460,21 +459,13 @@ public final class IndexWriterConfig extends LiveIndexWriterConfig {
     return this;
   }
 
-  /** We only allow sorting on these types */
-  private static final EnumSet<SortField.Type> ALLOWED_INDEX_SORT_TYPES = EnumSet.of(SortField.Type.STRING,
-                                                                                     SortField.Type.LONG,
-                                                                                     SortField.Type.INT,
-                                                                                     SortField.Type.DOUBLE,
-                                                                                     SortField.Type.FLOAT);
-
   /**
    * Set the {@link Sort} order to use for all (flushed and merged) segments.
    */
   public IndexWriterConfig setIndexSort(Sort sort) {
-    for(SortField sortField : sort.getSort()) {
-      final SortField.Type sortType = Sorter.getSortFieldType(sortField);
-      if (ALLOWED_INDEX_SORT_TYPES.contains(sortType) == false) {
-        throw new IllegalArgumentException("invalid SortField type: must be one of " + ALLOWED_INDEX_SORT_TYPES + " but got: " + sortField);
+    for (SortField sortField : sort.getSort()) {
+      if (sortField.getIndexSorter() == null) {
+        throw new IllegalArgumentException("Cannot sort index with sort field " + sortField);
       }
     }
     this.indexSort = sort;
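
From a user's point of view configuring an index sort is unchanged; the check above now only requires SortField#getIndexSorter to return non-null, which also admits custom SortField subclasses. A minimal usage sketch (the analyzer and field name are assumptions):

    IndexWriterConfig config = new IndexWriterConfig(analyzer);
    config.setIndexSort(new Sort(new SortedNumericSortField("timestamp", SortField.Type.LONG, true)));
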
diff --git a/lucene/core/src/java/org/apache/lucene/index/MultiSorter.java b/lucene/core/src/java/org/apache/lucene/index/MultiSorter.java
index 2348701..35d1441 100644
--- a/lucene/core/src/java/org/apache/lucene/index/MultiSorter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/MultiSorter.java
@@ -24,8 +24,6 @@ import org.apache.lucene.index.MergeState.DocMap;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.LongValues;
-import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.PriorityQueue;
 import org.apache.lucene.util.packed.PackedInts;
 import org.apache.lucene.util.packed.PackedLongValues;
@@ -41,10 +39,14 @@ final class MultiSorter {
     // TODO: optimize if only 1 reader is incoming, though that's a rare case
 
     SortField fields[] = sort.getSort();
-    final ComparableProvider[][] comparables = new ComparableProvider[fields.length][];
+    final IndexSorter.ComparableProvider[][] comparables = new IndexSorter.ComparableProvider[fields.length][];
     final int[] reverseMuls = new int[fields.length];
     for(int i=0;i<fields.length;i++) {
-      comparables[i] = getComparableProviders(readers, fields[i]);
+      IndexSorter sorter = fields[i].getIndexSorter();
+      if (sorter == null) {
+        throw new IllegalArgumentException("Cannot use sort field " + fields[i] + " for index sorting");
+      }
+      comparables[i] = sorter.getComparableProviders(readers);
       reverseMuls[i] = fields[i].getReverse() ? -1 : 1;
     }
     int leafCount = readers.size();
@@ -142,138 +144,4 @@ final class MultiSorter {
       this.valuesAsComparableLongs = new long[numComparables];
     }
   }
-
-  /** Returns a long so that the natural ordering of long values matches the
-   *  ordering of doc IDs for the given comparator. */
-  private interface ComparableProvider {
-    long getAsComparableLong(int docID) throws IOException;
-  }
-
-  /** Returns {@code ComparableProvider}s for the provided readers to represent the requested {@link SortField} sort order. */
-  private static ComparableProvider[] getComparableProviders(List<CodecReader> readers, SortField sortField) throws IOException {
-
-    ComparableProvider[] providers = new ComparableProvider[readers.size()];
-    final SortField.Type sortType = Sorter.getSortFieldType(sortField);
-
-    switch(sortType) {
-
-    case STRING:
-      {
-        // this uses the efficient segment-local ordinal map:
-        final SortedDocValues[] values = new SortedDocValues[readers.size()];
-        for(int i=0;i<readers.size();i++) {
-          final SortedDocValues sorted = Sorter.getOrWrapSorted(readers.get(i), sortField);
-          values[i] = sorted;
-        }
-        OrdinalMap ordinalMap = OrdinalMap.build(null, values, PackedInts.DEFAULT);
-        final int missingOrd;
-        if (sortField.getMissingValue() == SortField.STRING_LAST) {
-          missingOrd = Integer.MAX_VALUE;
-        } else {
-          missingOrd = Integer.MIN_VALUE;
-        }
-
-        for(int readerIndex=0;readerIndex<readers.size();readerIndex++) {
-          final SortedDocValues readerValues = values[readerIndex];
-          final LongValues globalOrds = ordinalMap.getGlobalOrds(readerIndex);
-          providers[readerIndex] = new ComparableProvider() {
-              @Override
-              public long getAsComparableLong(int docID) throws IOException {
-                if (readerValues.advanceExact(docID)) {
-                  // translate segment's ord to global ord space:
-                  return globalOrds.get(readerValues.ordValue());
-                } else {
-                  return missingOrd;
-                }
-              }
-            };
-        }
-      }
-      break;
-
-    case LONG:
-    case INT:
-      {
-        final long missingValue;
-        if (sortField.getMissingValue() != null) {
-          missingValue = ((Number) sortField.getMissingValue()).longValue();
-        } else {
-          missingValue = 0L;
-        }
-
-        for(int readerIndex=0;readerIndex<readers.size();readerIndex++) {
-          final NumericDocValues values = Sorter.getOrWrapNumeric(readers.get(readerIndex), sortField);
-
-          providers[readerIndex] = new ComparableProvider() {
-              @Override
-              public long getAsComparableLong(int docID) throws IOException {
-                if (values.advanceExact(docID)) {
-                  return values.longValue();
-                } else {
-                  return missingValue;
-                }
-              }
-            };
-        }
-      }
-      break;
-
-    case DOUBLE:
-      {
-        final double missingValue;
-        if (sortField.getMissingValue() != null) {
-          missingValue = (Double) sortField.getMissingValue();
-        } else {
-          missingValue = 0.0;
-        }
-
-        for(int readerIndex=0;readerIndex<readers.size();readerIndex++) {
-          final NumericDocValues values = Sorter.getOrWrapNumeric(readers.get(readerIndex), sortField);
-
-          providers[readerIndex] = new ComparableProvider() {
-              @Override
-              public long getAsComparableLong(int docID) throws IOException {
-                double value = missingValue;
-                if (values.advanceExact(docID)) {
-                  value = Double.longBitsToDouble(values.longValue());
-                }
-                return NumericUtils.doubleToSortableLong(value);
-              }
-            };
-        }
-      }
-      break;
-
-    case FLOAT:
-      {
-        final float missingValue;
-        if (sortField.getMissingValue() != null) {
-          missingValue = (Float) sortField.getMissingValue();
-        } else {
-          missingValue = 0.0f;
-        }
-
-        for(int readerIndex=0;readerIndex<readers.size();readerIndex++) {
-          final NumericDocValues values = Sorter.getOrWrapNumeric(readers.get(readerIndex), sortField);
-
-          providers[readerIndex] = new ComparableProvider() {
-              @Override
-              public long getAsComparableLong(int docID) throws IOException {
-                float value = missingValue;
-                if (values.advanceExact(docID)) {
-                  value = Float.intBitsToFloat((int) values.longValue());
-                }
-                return NumericUtils.floatToSortableInt(value);
-              }
-            };
-        }
-      }
-      break;
-
-    default:
-      throw new IllegalArgumentException("unhandled SortField.getType()=" + sortField.getType());
-    }
-
-    return providers;
-  }
 }
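
For readers unfamiliar with MultiSorter, the providers obtained above reduce each sort field to a long per document, so the cross-segment comparison boils down to the following (a hedged sketch, not MultiSorter's actual heap-based merge loop):

    static int compareAcrossSegments(IndexSorter.ComparableProvider[][] comparables, int[] reverseMuls,
                                     int segA, int docA, int segB, int docB) throws IOException {
      for (int f = 0; f < comparables.length; f++) {
        long a = comparables[f][segA].getAsComparableLong(docA);
        long b = comparables[f][segB].getAsComparableLong(docB);
        int cmp = reverseMuls[f] * Long.compare(a, b);
        if (cmp != 0) {
          return cmp;
        }
      }
      return 0; // tie on every sort field
    }
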
diff --git a/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java
index 980849f..87de98f 100644
--- a/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/NumericDocValuesWriter.java
@@ -21,7 +21,6 @@ import java.io.IOException;
 
 import org.apache.lucene.codecs.DocValuesConsumer;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.Counter;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.packed.PackedInts;
@@ -31,7 +30,7 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
 
 /** Buffers up pending long per doc, then flushes when
  *  segment flushes. */
-class NumericDocValuesWriter extends DocValuesWriter {
+class NumericDocValuesWriter extends DocValuesWriter<NumericDocValues> {
 
   private PackedLongValues.Builder pending;
   private PackedLongValues finalValues;
@@ -70,21 +69,11 @@ class NumericDocValuesWriter extends DocValuesWriter {
   }
 
   @Override
-  public void finish(int maxDoc) {
-  }
-
-  @Override
-  Sorter.DocComparator getDocComparator(int maxDoc, SortField sortField) throws IOException {
-    assert finalValues == null;
-    finalValues = pending.build();
-    final BufferedNumericDocValues docValues =
-        new BufferedNumericDocValues(finalValues, docsWithField.iterator());
-    return Sorter.getDocComparator(maxDoc, sortField, () -> null, () -> docValues);
-  }
-
-  @Override
-  DocIdSetIterator getDocIdSet() {
-    return docsWithField.iterator();
+  NumericDocValues getDocValues() {
+    if (finalValues == null) {
+      finalValues = pending.build();
+    }
+    return new BufferedNumericDocValues(finalValues, docsWithField.iterator());
   }
 
   static SortingLeafReader.CachedNumericDVs sortDocValues(int maxDoc, Sorter.DocMap sortMap, NumericDocValues oldDocValues) throws IOException {
@@ -104,16 +93,12 @@ class NumericDocValuesWriter extends DocValuesWriter {
 
   @Override
   public void flush(SegmentWriteState state, Sorter.DocMap sortMap, DocValuesConsumer dvConsumer) throws IOException {
-    final PackedLongValues values;
     if (finalValues == null) {
-      values = pending.build();
-    } else {
-      values = finalValues;
+      finalValues = pending.build();
     }
-
     final SortingLeafReader.CachedNumericDVs sorted;
     if (sortMap != null) {
-      NumericDocValues oldValues = new BufferedNumericDocValues(values, docsWithField.iterator());
+      NumericDocValues oldValues = new BufferedNumericDocValues(finalValues, docsWithField.iterator());
       sorted = sortDocValues(state.segmentInfo.maxDoc(), sortMap, oldValues);
     } else {
       sorted = null;
@@ -127,7 +112,7 @@ class NumericDocValuesWriter extends DocValuesWriter {
                                      throw new IllegalArgumentException("wrong fieldInfo");
                                    }
                                    if (sorted == null) {
-                                     return new BufferedNumericDocValues(values, docsWithField.iterator());
+                                     return new BufferedNumericDocValues(finalValues, docsWithField.iterator());
                                    } else {
                                      return new SortingLeafReader.SortingNumericDocValues(sorted);
                                    }
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortFieldProvider.java b/lucene/core/src/java/org/apache/lucene/index/SortFieldProvider.java
new file mode 100644
index 0000000..290decd
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/index/SortFieldProvider.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.index;
+
+import java.io.IOException;
+import java.util.Set;
+
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.store.DataInput;
+import org.apache.lucene.store.DataOutput;
+import org.apache.lucene.util.NamedSPILoader;
+
+/**
+ * Reads/Writes a named SortField from a segment info file, used to record index sorts
+ */
+public abstract class SortFieldProvider implements NamedSPILoader.NamedSPI {
+
+  private static class Holder {
+    private static final NamedSPILoader<SortFieldProvider> LOADER = new NamedSPILoader<>(SortFieldProvider.class);
+
+    static NamedSPILoader<SortFieldProvider> getLoader() {
+      if (LOADER == null) {
+        throw new IllegalStateException("You tried to lookup a SortFieldProvider by name before all SortFieldProviders could be initialized. "+
+            "This likely happens if you call SortFieldProvider#forName from a SortFieldProviders's ctor.");
+      }
+      return LOADER;
+    }
+  }
+
+  /**
+   * Looks up a SortFieldProvider by name
+   */
+  public static SortFieldProvider forName(String name) {
+    return Holder.getLoader().lookup(name);
+  }
+
+  /**
+   * Lists all available SortFieldProviders
+   */
+  public static Set<String> availableSortFieldProviders() {
+    return Holder.getLoader().availableServices();
+  }
+
+  /**
+   * Reloads the SortFieldProvider list from the given {@link ClassLoader}.
+   * Changes to the list are visible after the method ends, all
+   * iterators ({@link #availableSortFieldProviders()},...) stay consistent.
+   *
+   * <p><b>NOTE:</b> Only new SortFieldProviders are added, existing ones are
+   * never removed or replaced.
+   *
+   * <p><em>This method is expensive and should only be called for discovery
+   * of new SortFieldProviders on the given classpath/classloader!</em>
+   */
+  public static void reloadSortFieldProviders(ClassLoader classLoader) {
+    Holder.getLoader().reload(classLoader);
+  }
+
+  /**
+   * Writes a SortField to a DataOutput
+   */
+  public static void write(SortField sf, DataOutput output) throws IOException {
+    IndexSorter sorter = sf.getIndexSorter();
+    if (sorter == null) {
+      throw new IllegalArgumentException("Cannot serialize sort field " + sf);
+    }
+    SortFieldProvider provider = SortFieldProvider.forName(sorter.getProviderName());
+    provider.writeSortField(sf, output);
+  }
+
+  /** The name this SortFieldProvider is registered under */
+  protected final String name;
+
+  /**
+   * Creates a new SortFieldProvider.
+   * <p>
+   * The provided name will be written into the index segment: in order
+   * for the segment to be read, this class should be registered with Java's
+   * SPI mechanism (registered in META-INF/ of your jar file, etc).
+   * @param name must be all ascii alphanumeric, and less than 128 characters in length.
+   */
+  protected SortFieldProvider(String name) {
+    this.name = name;
+  }
+
+  @Override
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * Reads a SortField from serialized bytes
+   */
+  public abstract SortField readSortField(DataInput in) throws IOException;
+
+  /**
+   * Writes a SortField to a DataOutput
+   *
+   * This is used to record index sort information in segment headers
+   */
+  public abstract void writeSortField(SortField sf, DataOutput out) throws IOException;
+
+}
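
For illustration, here is a minimal sketch of the round trip this class enables: write() resolves a provider by the name reported by the field's IndexSorter, and forName()/readSortField() restore the field when a segment header is read. This sketch is not part of the patch; it assumes ByteBuffersDataOutput#toDataInput() from org.apache.lucene.store is available on this branch.

    import org.apache.lucene.index.SortFieldProvider;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.store.ByteBuffersDataOutput;
    import org.apache.lucene.store.DataInput;

    public class SortFieldRoundTrip {
      public static void main(String[] args) throws Exception {
        SortField original = new SortField("price", SortField.Type.LONG, true);

        // serialize: write() looks up the provider named by the field's IndexSorter
        ByteBuffersDataOutput out = new ByteBuffersDataOutput();
        SortFieldProvider.write(original, out);

        // deserialize: look up the provider by its registered name and read the field back
        DataInput in = out.toDataInput();
        SortField restored = SortFieldProvider.forName(SortField.Provider.NAME).readSortField(in);

        System.out.println(original.equals(restored));   // expected: true
      }
    }
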
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java
index 86d0f0b..2252f00 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedDocValuesWriter.java
@@ -21,7 +21,6 @@ import java.util.Arrays;
 
 import org.apache.lucene.codecs.DocValuesConsumer;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.SortField;
 import org.apache.lucene.util.ByteBlockPool;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefHash;
@@ -35,7 +34,7 @@ import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE;
 
 /** Buffers up pending byte[] per doc, deref and sorting via
  *  int ord, then flushes when segment flushes. */
-class SortedDocValuesWriter extends DocValuesWriter {
+class SortedDocValuesWriter extends DocValuesWriter<SortedDocValues> {
   final BytesRefHash hash;
   private PackedLongValues.Builder pending;
   private DocsWithFieldSet docsWithField;
@@ -79,11 +78,6 @@ class SortedDocValuesWriter extends DocValuesWriter {
     lastDocID = docID;
   }
 
-  @Override
-  public void finish(int maxDoc) {
-    updateBytesUsed();
-  }
-
   private void addOneValue(BytesRef value) {
     int termID = hash.add(value);
     if (termID < 0) {
@@ -107,20 +101,20 @@ class SortedDocValuesWriter extends DocValuesWriter {
   }
 
   @Override
-  Sorter.DocComparator getDocComparator(int maxDoc, SortField sortField) throws IOException {
-    assert sortField.getType().equals(SortField.Type.STRING);
-    assert finalSortedValues == null && finalOrdMap == null &&finalOrds == null;
+  SortedDocValues getDocValues() {
     int valueCount = hash.size();
-    finalSortedValues = hash.sort();
-    finalOrds = pending.build();
-    finalOrdMap = new int[valueCount];
+    if (finalSortedValues == null) {
+      updateBytesUsed();
+      assert finalOrdMap == null && finalOrds == null;
+      finalSortedValues = hash.sort();
+      finalOrds = pending.build();
+      finalOrdMap = new int[valueCount];
+    }
     for (int ord = 0; ord < valueCount; ord++) {
       finalOrdMap[finalSortedValues[ord]] = ord;
     }
-    final SortedDocValues docValues =
-        new BufferedSortedDocValues(hash, valueCount, finalOrds, finalSortedValues, finalOrdMap,
+    return new BufferedSortedDocValues(hash, valueCount, finalOrds, finalSortedValues, finalOrdMap,
             docsWithField.iterator());
-    return Sorter.getDocComparator(maxDoc, sortField, () -> docValues, () -> null);
   }
 
   private int[] sortDocValues(int maxDoc, Sorter.DocMap sortMap, SortedDocValues oldValues) throws IOException {
@@ -137,26 +131,20 @@ class SortedDocValuesWriter extends DocValuesWriter {
   @Override
   public void flush(SegmentWriteState state, Sorter.DocMap sortMap, DocValuesConsumer dvConsumer) throws IOException {
     final int valueCount = hash.size();
-    final PackedLongValues ords;
-    final int[] sortedValues;
-    final int[] ordMap;
     if (finalOrds == null) {
-      sortedValues = hash.sort();
-      ords = pending.build();
-      ordMap = new int[valueCount];
+      updateBytesUsed();
+      finalSortedValues = hash.sort();
+      finalOrds = pending.build();
+      finalOrdMap = new int[valueCount];
       for (int ord = 0; ord < valueCount; ord++) {
-        ordMap[sortedValues[ord]] = ord;
+        finalOrdMap[finalSortedValues[ord]] = ord;
       }
-    } else {
-      sortedValues = finalSortedValues;
-      ords = finalOrds;
-      ordMap = finalOrdMap;
     }
 
     final int[] sorted;
     if (sortMap != null) {
       sorted = sortDocValues(state.segmentInfo.maxDoc(), sortMap,
-          new BufferedSortedDocValues(hash, valueCount, ords, sortedValues, ordMap, docsWithField.iterator()));
+          new BufferedSortedDocValues(hash, valueCount, finalOrds, finalSortedValues, finalOrdMap, docsWithField.iterator()));
     } else {
       sorted = null;
     }
@@ -168,7 +156,7 @@ class SortedDocValuesWriter extends DocValuesWriter {
                                     throw new IllegalArgumentException("wrong fieldInfo");
                                   }
                                   final SortedDocValues buf =
-                                      new BufferedSortedDocValues(hash, valueCount, ords, sortedValues, ordMap, docsWithField.iterator());
+                                      new BufferedSortedDocValues(hash, valueCount, finalOrds, finalSortedValues, finalOrdMap, docsWithField.iterator());
                                   if (sorted == null) {
                                    return buf;
                                   }
@@ -245,8 +233,4 @@ class SortedDocValuesWriter extends DocValuesWriter {
     }
   }
 
-  @Override
-  DocIdSetIterator getDocIdSet() {
-    return docsWithField.iterator();
-  }
 }
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java
index bdc65cc..83c394f 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedNumericDocValuesWriter.java
@@ -22,9 +22,6 @@ import java.util.Arrays;
 
 import org.apache.lucene.codecs.DocValuesConsumer;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.SortedNumericSelector;
-import org.apache.lucene.search.SortedNumericSortField;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.Counter;
 import org.apache.lucene.util.RamUsageEstimator;
@@ -34,7 +31,7 @@ import org.apache.lucene.util.packed.PackedLongValues;
 import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
 
 /** Buffers up pending long[] per doc, sorts, then flushes when segment flushes. */
-class SortedNumericDocValuesWriter extends DocValuesWriter {
+class SortedNumericDocValuesWriter extends DocValuesWriter<SortedNumericDocValues> {
   private PackedLongValues.Builder pending; // stream of all values
   private PackedLongValues.Builder pendingCounts; // count of values per doc
   private DocsWithFieldSet docsWithField;
@@ -85,11 +82,6 @@ class SortedNumericDocValuesWriter extends DocValuesWriter {
     docsWithField.add(currentDoc);
   }
 
-  @Override
-  public void finish(int maxDoc) {
-    finishCurrentDoc();
-  }
-
   private void addOneValue(long value) {
     if (currentUpto == currentValues.length) {
       currentValues = ArrayUtil.grow(currentValues, currentValues.length+1);
@@ -106,16 +98,14 @@ class SortedNumericDocValuesWriter extends DocValuesWriter {
   }
 
   @Override
-  Sorter.DocComparator getDocComparator(int maxDoc, SortField sortField) throws IOException {
-    assert sortField instanceof SortedNumericSortField;
-    assert finalValues == null && finalValuesCount == null;
-    finalValues = pending.build();
-    finalValuesCount = pendingCounts.build();
-    final SortedNumericDocValues docValues =
-        new BufferedSortedNumericDocValues(finalValues, finalValuesCount, docsWithField.iterator());
-    SortedNumericSortField sf = (SortedNumericSortField) sortField;
-    return Sorter.getDocComparator(maxDoc, sf, () -> null,
-        () -> SortedNumericSelector.wrap(docValues, sf.getSelector(), sf.getNumericType()));
+  SortedNumericDocValues getDocValues() {
+    if (finalValues == null) {
+      assert finalValuesCount == null;
+      finishCurrentDoc();
+      finalValues = pending.build();
+      finalValuesCount = pendingCounts.build();
+    }
+    return new BufferedSortedNumericDocValues(finalValues, finalValuesCount, docsWithField.iterator());
   }
 
   private long[][] sortDocValues(int maxDoc, Sorter.DocMap sortMap, SortedNumericDocValues oldValues) throws IOException {
@@ -137,6 +127,7 @@ class SortedNumericDocValuesWriter extends DocValuesWriter {
     final PackedLongValues values;
     final PackedLongValues valueCounts;
     if (finalValues == null) {
+      finishCurrentDoc();
       values = pending.build();
       valueCounts = pendingCounts.build();
     } else {
@@ -232,8 +223,4 @@ class SortedNumericDocValuesWriter extends DocValuesWriter {
     }
   }
 
-  @Override
-  DocIdSetIterator getDocIdSet() {
-    return docsWithField.iterator();
-  }
 }
diff --git a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java
index 71a14a5..022b17d 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SortedSetDocValuesWriter.java
@@ -21,9 +21,6 @@ import java.util.Arrays;
 
 import org.apache.lucene.codecs.DocValuesConsumer;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.SortedSetSelector;
-import org.apache.lucene.search.SortedSetSortField;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.ByteBlockPool;
 import org.apache.lucene.util.BytesRef;
@@ -39,7 +36,7 @@ import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE;
 
 /** Buffers up pending byte[]s per doc, deref and sorting via
  *  int ord, then flushes when segment flushes. */
-class SortedSetDocValuesWriter extends DocValuesWriter {
+class SortedSetDocValuesWriter extends DocValuesWriter<SortedSetDocValues> {
   final BytesRefHash hash;
   private PackedLongValues.Builder pending; // stream of all termIDs
   private PackedLongValues.Builder pendingCounts; // termIDs per doc
@@ -115,11 +112,6 @@ class SortedSetDocValuesWriter extends DocValuesWriter {
     docsWithField.add(currentDoc);
   }
 
-  @Override
-  public void finish(int maxDoc) {
-    finishCurrentDoc();
-  }
-
   private void addOneValue(BytesRef value) {
     int termID = hash.add(value);
     if (termID < 0) {
@@ -170,22 +162,20 @@ class SortedSetDocValuesWriter extends DocValuesWriter {
   }
 
   @Override
-  Sorter.DocComparator getDocComparator(int maxDoc, SortField sortField) throws IOException {
-    assert sortField instanceof SortedSetSortField;
-    assert finalOrds == null && finalOrdCounts == null && finalSortedValues == null && finalOrdMap == null;
-    int valueCount = hash.size();
-    finalOrds = pending.build();
-    finalOrdCounts = pendingCounts.build();
-    finalSortedValues = hash.sort();
-    finalOrdMap = new int[valueCount];
-    for (int ord = 0; ord < valueCount; ord++) {
+  SortedSetDocValues getDocValues() {
+    if (finalOrds == null) {
+      assert finalOrdCounts == null && finalSortedValues == null && finalOrdMap == null;
+      finishCurrentDoc();
+      int valueCount = hash.size();
+      finalOrds = pending.build();
+      finalOrdCounts = pendingCounts.build();
+      finalSortedValues = hash.sort();
+      finalOrdMap = new int[valueCount];
+    }
+    for (int ord = 0; ord < finalOrdMap.length; ord++) {
       finalOrdMap[finalSortedValues[ord]] = ord;
     }
-
-    SortedSetSortField sf = (SortedSetSortField) sortField;
-    final SortedSetDocValues dvs =
-        new BufferedSortedSetDocValues(finalSortedValues, finalOrdMap, hash, finalOrds, finalOrdCounts, maxCount, docsWithField.iterator());
-    return Sorter.getDocComparator(maxDoc, sf, () -> SortedSetSelector.wrap(dvs, sf.getSelector()), () -> null);
+    return new BufferedSortedSetDocValues(finalSortedValues, finalOrdMap, hash, finalOrds, finalOrdCounts, maxCount, docsWithField.iterator());
   }
 
   @Override
@@ -196,7 +186,9 @@ class SortedSetDocValuesWriter extends DocValuesWriter {
     final int[] sortedValues;
     final int[] ordMap;
 
-    if (finalOrdCounts == null) {
+    if (finalOrds == null) {
+      assert finalOrdCounts == null && finalSortedValues == null && finalOrdMap == null;
+      finishCurrentDoc();
       ords = pending.build();
       ordCounts = pendingCounts.build();
       sortedValues = hash.sort();
@@ -315,8 +307,5 @@ class SortedSetDocValuesWriter extends DocValuesWriter {
       return scratch;
     }
   }
-  @Override
-  DocIdSetIterator getDocIdSet() {
-    return docsWithField.iterator();
-  }
+
 }
diff --git a/lucene/core/src/java/org/apache/lucene/index/Sorter.java b/lucene/core/src/java/org/apache/lucene/index/Sorter.java
index 5f43c5a..c860569 100644
--- a/lucene/core/src/java/org/apache/lucene/index/Sorter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/Sorter.java
@@ -17,22 +17,13 @@
 package org.apache.lucene.index;
 
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.Comparator;
 
-import org.apache.lucene.search.FieldComparator;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.SortedNumericSelector;
-import org.apache.lucene.search.SortedNumericSortField;
-import org.apache.lucene.search.SortedSetSelector;
-import org.apache.lucene.search.SortedSetSortField;
 import org.apache.lucene.util.TimSorter;
 import org.apache.lucene.util.packed.PackedInts;
 import org.apache.lucene.util.packed.PackedLongValues;
 
-import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
-
 /**
  * Sorts documents of a given index by returning a permutation on the document
  * IDs.
@@ -84,21 +75,13 @@ final class Sorter {
     return true;
   }
 
-  /** A comparator of doc IDs. */
-  static abstract class DocComparator {
-
-    /** Compare docID1 against docID2. The contract for the return value is the
-     *  same as {@link Comparator#compare(Object, Object)}. */
-    public abstract int compare(int docID1, int docID2);
-  }
-
   private static final class DocValueSorter extends TimSorter {
     
     private final int[] docs;
-    private final Sorter.DocComparator comparator;
+    private final IndexSorter.DocComparator comparator;
     private final int[] tmp;
     
-    DocValueSorter(int[] docs, Sorter.DocComparator comparator) {
+    DocValueSorter(int[] docs, IndexSorter.DocComparator comparator) {
       super(docs.length / 64);
       this.docs = docs;
       this.comparator = comparator;
@@ -139,7 +122,7 @@ final class Sorter {
   }
 
   /** Computes the old-to-new permutation over the given comparator. */
-  private static Sorter.DocMap sort(final int maxDoc, DocComparator comparator) {
+  private static Sorter.DocMap sort(final int maxDoc, IndexSorter.DocComparator comparator) {
     // check if the index is sorted
     boolean sorted = true;
     for (int i = 1; i < maxDoc; ++i) {
@@ -202,196 +185,10 @@ final class Sorter {
     };
   }
 
-  /** Returns the native sort type for {@link SortedSetSortField} and {@link SortedNumericSortField},
-   * {@link SortField#getType()} otherwise */
-  static SortField.Type getSortFieldType(SortField sortField) {
-    if (sortField instanceof SortedSetSortField) {
-      return SortField.Type.STRING;
-    } else if (sortField instanceof SortedNumericSortField) {
-      return ((SortedNumericSortField) sortField).getNumericType();
-    } else {
-      return sortField.getType();
-    }
-  }
-
-  /** Wraps a {@link SortedNumericDocValues} as a single-valued view if the field is an instance of {@link SortedNumericSortField},
-   * returns {@link NumericDocValues} for the field otherwise. */
-  static NumericDocValues getOrWrapNumeric(LeafReader reader, SortField sortField) throws IOException {
-    if (sortField instanceof SortedNumericSortField) {
-      SortedNumericSortField sf = (SortedNumericSortField) sortField;
-      return SortedNumericSelector.wrap(DocValues.getSortedNumeric(reader, sf.getField()), sf.getSelector(), sf.getNumericType());
-    } else {
-      return DocValues.getNumeric(reader, sortField.getField());
-    }
-  }
-
-  /** Wraps a {@link SortedSetDocValues} as a single-valued view if the field is an instance of {@link SortedSetSortField},
-   * returns {@link SortedDocValues} for the field otherwise. */
-  static SortedDocValues getOrWrapSorted(LeafReader reader, SortField sortField) throws IOException {
-    if (sortField instanceof SortedSetSortField) {
-      SortedSetSortField sf = (SortedSetSortField) sortField;
-      return SortedSetSelector.wrap(DocValues.getSortedSet(reader, sf.getField()), sf.getSelector());
-    } else {
-      return DocValues.getSorted(reader, sortField.getField());
-    }
-  }
-
-  static DocComparator getDocComparator(LeafReader reader, SortField sortField) throws IOException {
-    return getDocComparator(reader.maxDoc(), sortField,
-        () -> getOrWrapSorted(reader, sortField),
-        () -> getOrWrapNumeric(reader, sortField));
-  }
-
-  interface NumericDocValuesSupplier {
-    NumericDocValues get() throws IOException;
-  }
-
-  interface SortedDocValuesSupplier {
-    SortedDocValues get() throws IOException;
-  }
-
-  /** We cannot use the {@link FieldComparator} API because that API requires that you send it docIDs in order.  Note that this API
-   *  allocates arrays[maxDoc] to hold the native values needed for comparison, but 1) they are transient (only alive while sorting this one
-   *  segment), and 2) in the typical index sorting case, they are only used to sort newly flushed segments, which will be smaller than
-   *  merged segments.  */
-  static DocComparator getDocComparator(int maxDoc,
-                                        SortField sortField,
-                                        SortedDocValuesSupplier sortedProvider,
-                                        NumericDocValuesSupplier numericProvider) throws IOException {
-
-    final int reverseMul = sortField.getReverse() ? -1 : 1;
-    final SortField.Type sortType = getSortFieldType(sortField);
-
-    switch(sortType) {
-
-      case STRING:
-      {
-        final SortedDocValues sorted = sortedProvider.get();
-        final int missingOrd;
-        if (sortField.getMissingValue() == SortField.STRING_LAST) {
-          missingOrd = Integer.MAX_VALUE;
-        } else {
-          missingOrd = Integer.MIN_VALUE;
-        }
-
-        final int[] ords = new int[maxDoc];
-        Arrays.fill(ords, missingOrd);
-        int docID;
-        while ((docID = sorted.nextDoc()) != NO_MORE_DOCS) {
-          ords[docID] = sorted.ordValue();
-        }
-
-        return new DocComparator() {
-          @Override
-          public int compare(int docID1, int docID2) {
-            return reverseMul * Integer.compare(ords[docID1], ords[docID2]);
-          }
-        };
-      }
-
-      case LONG:
-      {
-        final NumericDocValues dvs = numericProvider.get();
-        long[] values = new long[maxDoc];
-        if (sortField.getMissingValue() != null) {
-          Arrays.fill(values, (Long) sortField.getMissingValue());
-        }
-        while (true) {
-          int docID = dvs.nextDoc();
-          if (docID == NO_MORE_DOCS) {
-            break;
-          }
-          values[docID] = dvs.longValue();
-        }
-
-        return new DocComparator() {
-          @Override
-          public int compare(int docID1, int docID2) {
-            return reverseMul * Long.compare(values[docID1], values[docID2]);
-          }
-        };
-      }
-
-      case INT:
-      {
-        final NumericDocValues dvs = numericProvider.get();
-        int[] values = new int[maxDoc];
-        if (sortField.getMissingValue() != null) {
-          Arrays.fill(values, (Integer) sortField.getMissingValue());
-        }
-
-        while (true) {
-          int docID = dvs.nextDoc();
-          if (docID == NO_MORE_DOCS) {
-            break;
-          }
-          values[docID] = (int) dvs.longValue();
-        }
-
-        return new DocComparator() {
-          @Override
-          public int compare(int docID1, int docID2) {
-            return reverseMul * Integer.compare(values[docID1], values[docID2]);
-          }
-        };
-      }
-
-      case DOUBLE:
-      {
-        final NumericDocValues dvs = numericProvider.get();
-        double[] values = new double[maxDoc];
-        if (sortField.getMissingValue() != null) {
-          Arrays.fill(values, (Double) sortField.getMissingValue());
-        }
-        while (true) {
-          int docID = dvs.nextDoc();
-          if (docID == NO_MORE_DOCS) {
-            break;
-          }
-          values[docID] = Double.longBitsToDouble(dvs.longValue());
-        }
-
-        return new DocComparator() {
-          @Override
-          public int compare(int docID1, int docID2) {
-            return reverseMul * Double.compare(values[docID1], values[docID2]);
-          }
-        };
-      }
-
-      case FLOAT:
-      {
-        final NumericDocValues dvs = numericProvider.get();
-        float[] values = new float[maxDoc];
-        if (sortField.getMissingValue() != null) {
-          Arrays.fill(values, (Float) sortField.getMissingValue());
-        }
-        while (true) {
-          int docID = dvs.nextDoc();
-          if (docID == NO_MORE_DOCS) {
-            break;
-          }
-          values[docID] = Float.intBitsToFloat((int) dvs.longValue());
-        }
-
-        return new DocComparator() {
-          @Override
-          public int compare(int docID1, int docID2) {
-            return reverseMul * Float.compare(values[docID1], values[docID2]);
-          }
-        };
-      }
-
-      default:
-        throw new IllegalArgumentException("unhandled SortField.getType()=" + sortField.getType());
-    }
-  }
-
-
   /**
    * Returns a mapping from the old document ID to its new location in the
    * sorted index. Implementations can use the auxiliary
-   * {@link #sort(int, DocComparator)} to compute the old-to-new permutation
+   * {@link #sort(int, IndexSorter.DocComparator)} to compute the old-to-new permutation
    * given a list of documents and their corresponding values.
    * <p>
    * A return value of <code>null</code> is allowed and means that
@@ -401,28 +198,29 @@ final class Sorter {
    * well, they will however be marked as deleted in the sorted view.
    */
   DocMap sort(LeafReader reader) throws IOException {
-    SortField fields[] = sort.getSort();
-    final DocComparator comparators[] = new DocComparator[fields.length];
+    SortField[] fields = sort.getSort();
+    final IndexSorter.DocComparator[] comparators = new IndexSorter.DocComparator[fields.length];
 
     for (int i = 0; i < fields.length; i++) {
-      comparators[i] = getDocComparator(reader, fields[i]);
+      IndexSorter sorter = fields[i].getIndexSorter();
+      if (sorter == null) {
+        throw new IllegalArgumentException("Cannot use sortfield + "  + fields[i] + " to sort indexes");
+      }
+      comparators[i] = sorter.getDocComparator(reader, reader.maxDoc());
     }
     return sort(reader.maxDoc(), comparators);
   }
 
 
-  DocMap sort(int maxDoc, DocComparator[] comparators) throws IOException {
-    final DocComparator comparator = new DocComparator() {
-      @Override
-      public int compare(int docID1, int docID2) {
-        for (int i = 0; i < comparators.length; i++) {
-          int comp = comparators[i].compare(docID1, docID2);
-          if (comp != 0) {
-            return comp;
-          }
+  DocMap sort(int maxDoc, IndexSorter.DocComparator[] comparators) throws IOException {
+    final IndexSorter.DocComparator comparator = (docID1, docID2) -> {
+      for (int i = 0; i < comparators.length; i++) {
+        int comp = comparators[i].compare(docID1, docID2);
+        if (comp != 0) {
+          return comp;
         }
-        return Integer.compare(docID1, docID2); // docid order tiebreak
       }
+      return Integer.compare(docID1, docID2); // docid order tiebreak
     };
 
     return sort(maxDoc, comparator);
diff --git a/lucene/core/src/java/org/apache/lucene/search/SortField.java b/lucene/core/src/java/org/apache/lucene/search/SortField.java
index 2cfae46..7512ec9 100644
--- a/lucene/core/src/java/org/apache/lucene/search/SortField.java
+++ b/lucene/core/src/java/org/apache/lucene/search/SortField.java
@@ -21,7 +21,13 @@ import java.io.IOException;
 import java.util.Comparator;
 import java.util.Objects;
 
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.IndexSorter;
+import org.apache.lucene.index.SortFieldProvider;
+import org.apache.lucene.store.DataInput;
+import org.apache.lucene.store.DataOutput;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.NumericUtils;
 
 /**
  * Stores information about how to sort documents by terms in an individual
@@ -120,6 +126,106 @@ public class SortField {
     this.reverse = reverse;
   }
 
+  /** A SortFieldProvider for field sorts */
+  public static final class Provider extends SortFieldProvider {
+
+    /** The name this Provider is registered under */
+    public static final String NAME = "SortField";
+
+    /** Creates a new Provider */
+    public Provider() {
+      super(NAME);
+    }
+
+    @Override
+    public SortField readSortField(DataInput in) throws IOException {
+      SortField sf = new SortField(in.readString(), readType(in), in.readInt() == 1);
+      if (in.readInt() == 1) {
+        // missing object
+        switch (sf.type) {
+          case STRING:
+            int missingString = in.readInt();
+            if (missingString == 1) {
+              sf.setMissingValue(STRING_FIRST);
+            }
+            else {
+              sf.setMissingValue(STRING_LAST);
+            }
+            break;
+          case INT:
+            sf.setMissingValue(in.readInt());
+            break;
+          case LONG:
+            sf.setMissingValue(in.readLong());
+            break;
+          case FLOAT:
+            sf.setMissingValue(NumericUtils.sortableIntToFloat(in.readInt()));
+            break;
+          case DOUBLE:
+            sf.setMissingValue(NumericUtils.sortableLongToDouble(in.readLong()));
+            break;
+          default:
+            throw new IllegalArgumentException("Cannot deserialize sort of type " + sf.type);
+        }
+      }
+      return sf;
+    }
+
+    @Override
+    public void writeSortField(SortField sf, DataOutput out) throws IOException {
+      sf.serialize(out);
+    }
+  }
+
+  protected static Type readType(DataInput in) throws IOException {
+    String type = in.readString();
+    try {
+      return Type.valueOf(type);
+    }
+    catch (IllegalArgumentException e) {
+      throw new IllegalArgumentException("Can't deserialize SortField - unknown type " + type);
+    }
+  }
+
+  private void serialize(DataOutput out) throws IOException {
+    out.writeString(field);
+    out.writeString(type.toString());
+    out.writeInt(reverse ? 1 : 0);
+    if (missingValue == null) {
+      out.writeInt(0);
+    }
+    else {
+      out.writeInt(1);
+      switch (type) {
+        case STRING:
+          if (missingValue == STRING_LAST) {
+            out.writeInt(0);
+          }
+          else if (missingValue == STRING_FIRST) {
+            out.writeInt(1);
+          }
+          else {
+            throw new IllegalArgumentException("Cannot serialize missing value of " + missingValue + " for type STRING");
+          }
+          break;
+        case INT:
+          out.writeInt((int)missingValue);
+          break;
+        case LONG:
+          out.writeLong((long)missingValue);
+          break;
+        case FLOAT:
+          out.writeInt(NumericUtils.floatToSortableInt((float)missingValue));
+          break;
+        case DOUBLE:
+          out.writeLong(NumericUtils.doubleToSortableLong((double)missingValue));
+          break;
+        default:
+          throw new IllegalArgumentException("Cannot serialize SortField of type " + type);
+      }
+    }
+  }
+
   /** Pass this to {@link #setMissingValue} to have missing
    *  string values sort first. */
   public final static Object STRING_FIRST = new Object() {
@@ -392,4 +498,33 @@ public class SortField {
   public boolean needsScores() {
     return type == Type.SCORE;
   }
+
+  /**
+   * Returns an {@link IndexSorter} used for sorting index segments by this SortField.
+   *
+   * If the SortField cannot be used for index sorting (for example, if it uses scores or
+   * other query-dependent values) then this method should return {@code null}
+   *
+   * SortFields that implement this method should also implement a companion
+   * {@link SortFieldProvider} to serialize and deserialize the sort in index segment
+   * headers
+   *
+   * @lucene.experimental
+   */
+  public IndexSorter getIndexSorter() {
+    switch (type) {
+      case STRING:
+        return new IndexSorter.StringSorter(Provider.NAME, missingValue, reverse, reader -> DocValues.getSorted(reader, field));
+      case INT:
+        return new IndexSorter.IntSorter(Provider.NAME, (Integer)missingValue, reverse, reader -> DocValues.getNumeric(reader, field));
+      case LONG:
+        return new IndexSorter.LongSorter(Provider.NAME, (Long)missingValue, reverse, reader -> DocValues.getNumeric(reader, field));
+      case DOUBLE:
+        return new IndexSorter.DoubleSorter(Provider.NAME, (Double)missingValue, reverse, reader -> DocValues.getNumeric(reader, field));
+      case FLOAT:
+        return new IndexSorter.FloatSorter(Provider.NAME, (Float)missingValue, reverse, reader -> DocValues.getNumeric(reader, field));
+      default: return null;
+    }
+  }
+
 }
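
For illustration (not part of the patch), the new getIndexSorter() hook lets callers verify up front that a Sort can back an index sort before handing it to IndexWriterConfig; the field name "timestamp" below is only an example.

    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;

    public class IndexSortCheck {
      public static void main(String[] args) {
        Sort indexSort = new Sort(new SortField("timestamp", SortField.Type.LONG, true));
        for (SortField sf : indexSort.getSort()) {
          if (sf.getIndexSorter() == null) {          // e.g. SortField.Type.SCORE returns null
            throw new IllegalArgumentException("Cannot sort index with sort field " + sf);
          }
        }
        IndexWriterConfig iwc = new IndexWriterConfig();
        iwc.setIndexSort(indexSort);                  // accepted: LONG supports index sorting
      }
    }
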
diff --git a/lucene/core/src/java/org/apache/lucene/search/SortedNumericSortField.java b/lucene/core/src/java/org/apache/lucene/search/SortedNumericSortField.java
index fff000b..6c5154a 100644
--- a/lucene/core/src/java/org/apache/lucene/search/SortedNumericSortField.java
+++ b/lucene/core/src/java/org/apache/lucene/search/SortedNumericSortField.java
@@ -20,9 +20,15 @@ package org.apache.lucene.search;
 import java.io.IOException;
 
 import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.IndexSorter;
+import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.SortFieldProvider;
 import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.store.DataInput;
+import org.apache.lucene.store.DataOutput;
+import org.apache.lucene.util.NumericUtils;
 
 /** 
  * SortField for {@link SortedNumericDocValues}.
@@ -83,6 +89,86 @@ public class SortedNumericSortField extends SortField {
     this.type = type;
   }
 
+  /** A SortFieldProvider for this sort field */
+  public static final class Provider extends SortFieldProvider {
+
+    /** The name this provider is registered under */
+    public static final String NAME = "SortedNumericSortField";
+
+    /** Creates a new Provider */
+    public Provider() {
+      super(NAME);
+    }
+
+    @Override
+    public SortField readSortField(DataInput in) throws IOException {
+      SortedNumericSortField sf = new SortedNumericSortField(in.readString(), readType(in), in.readInt() == 1, readSelectorType(in));
+      if (in.readInt() == 1) {
+        switch (sf.type) {
+          case INT:
+            sf.setMissingValue(in.readInt());
+            break;
+          case LONG:
+            sf.setMissingValue(in.readLong());
+            break;
+          case FLOAT:
+            sf.setMissingValue(NumericUtils.sortableIntToFloat(in.readInt()));
+            break;
+          case DOUBLE:
+            sf.setMissingValue(NumericUtils.sortableLongToDouble(in.readLong()));
+            break;
+          default:
+            throw new AssertionError();
+        }
+      }
+      return sf;
+    }
+
+    @Override
+    public void writeSortField(SortField sf, DataOutput out) throws IOException {
+      assert sf instanceof SortedNumericSortField;
+      ((SortedNumericSortField)sf).serialize(out);
+    }
+  }
+
+  private static SortedNumericSelector.Type readSelectorType(DataInput in) throws IOException {
+    int selectorType = in.readInt();
+    if (selectorType >= SortedNumericSelector.Type.values().length) {
+      throw new IllegalArgumentException("Can't deserialize SortedNumericSortField - unknown selector type " + selectorType);
+    }
+    return SortedNumericSelector.Type.values()[selectorType];
+  }
+
+  private void serialize(DataOutput out) throws IOException {
+    out.writeString(getField());
+    out.writeString(type.toString());
+    out.writeInt(reverse ? 1 : 0);
+    out.writeInt(selector.ordinal());
+    if (missingValue == null) {
+      out.writeInt(0);
+    }
+    else {
+      out.writeInt(1);
+      // oh for switch expressions...
+      switch (type) {
+        case INT:
+          out.writeInt((int)missingValue);
+          break;
+        case LONG:
+          out.writeLong((long)missingValue);
+          break;
+        case FLOAT:
+          out.writeInt(NumericUtils.floatToSortableInt((float)missingValue));
+          break;
+        case DOUBLE:
+          out.writeLong(NumericUtils.doubleToSortableLong((double)missingValue));
+          break;
+        default:
+          throw new AssertionError();
+      }
+    }
+  }
+
   /** Returns the numeric type in use for this sort */
   public SortField.Type getNumericType() {
     return type;
@@ -170,4 +256,24 @@ public class SortedNumericSortField extends SortField {
         throw new AssertionError();
     }
   }
+
+  private NumericDocValues getValue(LeafReader reader) throws IOException {
+    return SortedNumericSelector.wrap(DocValues.getSortedNumeric(reader, getField()), selector, type);
+  }
+
+  @Override
+  public IndexSorter getIndexSorter() {
+    switch(type) {
+      case INT:
+        return new IndexSorter.IntSorter(Provider.NAME, (Integer)missingValue, reverse, this::getValue);
+      case LONG:
+        return new IndexSorter.LongSorter(Provider.NAME, (Long)missingValue, reverse, this::getValue);
+      case DOUBLE:
+        return new IndexSorter.DoubleSorter(Provider.NAME, (Double)missingValue, reverse, this::getValue);
+      case FLOAT:
+        return new IndexSorter.FloatSorter(Provider.NAME, (Float)missingValue, reverse, this::getValue);
+      default:
+        throw new AssertionError();
+    }
+  }
 }
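
For illustration (not part of the patch), the selector-aware getIndexSorter() above means a multi-valued numeric field can now drive an index sort; the field name "prices" is only an example.

    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.search.SortedNumericSelector;
    import org.apache.lucene.search.SortedNumericSortField;

    public class MultiValuedIndexSort {
      public static void main(String[] args) {
        // sort segments by the minimum value of the multi-valued "prices" field
        SortField sf = new SortedNumericSortField("prices", SortField.Type.LONG, false,
            SortedNumericSelector.Type.MIN);
        IndexWriterConfig iwc = new IndexWriterConfig();
        iwc.setIndexSort(new Sort(sf));
      }
    }
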
diff --git a/lucene/core/src/java/org/apache/lucene/search/SortedSetSortField.java b/lucene/core/src/java/org/apache/lucene/search/SortedSetSortField.java
index b095c6e..2321a66 100644
--- a/lucene/core/src/java/org/apache/lucene/search/SortedSetSortField.java
+++ b/lucene/core/src/java/org/apache/lucene/search/SortedSetSortField.java
@@ -16,13 +16,17 @@
  */
 package org.apache.lucene.search;
 
-
 import java.io.IOException;
 
 import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.IndexSorter;
+import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortFieldProvider;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.store.DataInput;
+import org.apache.lucene.store.DataOutput;
 
 /** 
  * SortField for {@link SortedSetDocValues}.
@@ -68,6 +72,60 @@ public class SortedSetSortField extends SortField {
     }
     this.selector = selector;
   }
+
+  /** A SortFieldProvider for this sort */
+  public static final class Provider extends SortFieldProvider {
+
+    /** The name this provider is registered under */
+    public static final String NAME = "SortedSetSortField";
+
+    /** Creates a new Provider */
+    public Provider() {
+      super(NAME);
+    }
+
+    @Override
+    public SortField readSortField(DataInput in) throws IOException {
+      SortField sf = new SortedSetSortField(in.readString(), in.readInt() == 1, readSelectorType(in));
+      int missingValue = in.readInt();
+      if (missingValue == 1) {
+        sf.setMissingValue(SortField.STRING_FIRST);
+      }
+      else if (missingValue == 2) {
+        sf.setMissingValue(SortField.STRING_LAST);
+      }
+      return sf;
+    }
+
+    @Override
+    public void writeSortField(SortField sf, DataOutput out) throws IOException {
+      assert sf instanceof SortedSetSortField;
+      ((SortedSetSortField)sf).serialize(out);
+    }
+  }
+
+  private static SortedSetSelector.Type readSelectorType(DataInput in) throws IOException {
+    int type = in.readInt();
+    if (type >= SortedSetSelector.Type.values().length) {
+      throw new IllegalArgumentException("Cannot deserialize SortedSetSortField: unknown selector type " + type);
+    }
+    return SortedSetSelector.Type.values()[type];
+  }
+
+  private void serialize(DataOutput out) throws IOException {
+    out.writeString(getField());
+    out.writeInt(reverse ? 1 : 0);
+    out.writeInt(selector.ordinal());
+    if (missingValue == SortField.STRING_FIRST) {
+      out.writeInt(1);
+    }
+    else if (missingValue == SortField.STRING_LAST) {
+      out.writeInt(2);
+    }
+    else {
+      out.writeInt(0);
+    }
+  }
   
   /** Returns the selector in use for this sort */
   public SortedSetSelector.Type getSelector() {
@@ -126,4 +184,13 @@ public class SortedSetSortField extends SortField {
       }
     };
   }
+
+  private SortedDocValues getValues(LeafReader reader) throws IOException {
+    return SortedSetSelector.wrap(DocValues.getSortedSet(reader, getField()), selector);
+  }
+
+  @Override
+  public IndexSorter getIndexSorter() {
+    return new IndexSorter.StringSorter(Provider.NAME, missingValue, reverse, this::getValues);
+  }
 }
diff --git a/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec b/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
index 611e237..2897a8a 100644
--- a/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
+++ b/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
@@ -13,4 +13,4 @@
 #  See the License for the specific language governing permissions and
 #  limitations under the License.
 
-org.apache.lucene.codecs.lucene84.Lucene84Codec
+org.apache.lucene.codecs.lucene86.Lucene86Codec
diff --git a/lucene/core/src/resources/META-INF/services/org.apache.lucene.index.SortFieldProvider b/lucene/core/src/resources/META-INF/services/org.apache.lucene.index.SortFieldProvider
new file mode 100644
index 0000000..a96a47b
--- /dev/null
+++ b/lucene/core/src/resources/META-INF/services/org.apache.lucene.index.SortFieldProvider
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.lucene.search.SortField$Provider
+org.apache.lucene.search.SortedNumericSortField$Provider
+org.apache.lucene.search.SortedSetSortField$Provider
\ No newline at end of file
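
For illustration (not part of the patch), a third-party SortFieldProvider would be registered exactly like the three built-in entries above, by listing its class name in its jar's META-INF/services/org.apache.lucene.index.SortFieldProvider file. The class, package, and serialized layout below are invented for the sketch.

    package com.example;   // hypothetical; add "com.example.MySortFieldProvider" to the service file

    import java.io.IOException;

    import org.apache.lucene.index.SortFieldProvider;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.store.DataInput;
    import org.apache.lucene.store.DataOutput;

    public class MySortFieldProvider extends SortFieldProvider {

      public MySortFieldProvider() {
        super("MySortField");   // the name recorded in segment headers
      }

      @Override
      public SortField readSortField(DataInput in) throws IOException {
        // read back exactly what writeSortField() wrote: field name and reverse flag
        return new SortField(in.readString(), SortField.Type.LONG, in.readInt() == 1);
      }

      @Override
      public void writeSortField(SortField sf, DataOutput out) throws IOException {
        out.writeString(sf.getField());
        out.writeInt(sf.getReverse() ? 1 : 0);
      }
    }
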
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java
index 6a3ce93..cccee73 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java
@@ -17,9 +17,10 @@
 package org.apache.lucene.codecs.lucene50;
 
 
+import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.StoredField;
 import org.apache.lucene.index.BaseStoredFieldsFormatTestCase;
@@ -28,12 +29,10 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.store.Directory;
 
-import com.carrotsearch.randomizedtesting.generators.RandomPicks;
-
 public class TestLucene50StoredFieldsFormatHighCompression extends BaseStoredFieldsFormatTestCase {
   @Override
   protected Codec getCodec() {
-    return new Lucene84Codec(Mode.BEST_COMPRESSION);
+    return new Lucene86Codec(Mode.BEST_COMPRESSION);
   }
   
   /**
@@ -44,7 +43,7 @@ public class TestLucene50StoredFieldsFormatHighCompression extends BaseStoredFie
     Directory dir = newDirectory();
     for (int i = 0; i < 10; i++) {
       IndexWriterConfig iwc = newIndexWriterConfig();
-      iwc.setCodec(new Lucene84Codec(RandomPicks.randomFrom(random(), Mode.values())));
+      iwc.setCodec(new Lucene86Codec(RandomPicks.randomFrom(random(), Mode.values())));
       IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig());
       Document doc = new Document();
       doc.add(new StoredField("field1", "value1"));
@@ -71,7 +70,7 @@ public class TestLucene50StoredFieldsFormatHighCompression extends BaseStoredFie
   
   public void testInvalidOptions() {
     expectThrows(NullPointerException.class, () -> {
-      new Lucene84Codec(null);
+      new Lucene86Codec(null);
     });
 
     expectThrows(NullPointerException.class, () -> {
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80NormsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80NormsFormat.java
index 4eadf05..b6e7268 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80NormsFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene80/TestLucene80NormsFormat.java
@@ -18,14 +18,14 @@ package org.apache.lucene.codecs.lucene80;
 
 
 import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.apache.lucene.index.BaseNormsFormatTestCase;
 
 /**
  * Tests Lucene80NormsFormat
  */
 public class TestLucene80NormsFormat extends BaseNormsFormatTestCase {
-  private final Codec codec = new Lucene84Codec();
+  private final Codec codec = new Lucene86Codec();
   
   @Override
   protected Codec getCodec() {
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene86/TestLucene86SegmentInfoFormat.java
similarity index 91%
rename from lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
rename to lucene/core/src/test/org/apache/lucene/codecs/lucene86/TestLucene86SegmentInfoFormat.java
index 3bf6a18..e462d3f 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70SegmentInfoFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene86/TestLucene86SegmentInfoFormat.java
@@ -14,14 +14,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.lucene.codecs.lucene70;
+
+package org.apache.lucene.codecs.lucene86;
 
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.index.BaseSegmentInfoFormatTestCase;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;
 
-public class TestLucene70SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {
+public class TestLucene86SegmentInfoFormat extends BaseSegmentInfoFormatTestCase {
 
   @Override
   protected Version[] getVersions() {
@@ -32,4 +33,5 @@ public class TestLucene70SegmentInfoFormat extends BaseSegmentInfoFormatTestCase
   protected Codec getCodec() {
     return TestUtil.getDefaultCodec();
   }
+
 }
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
index ae944de..042e2a8 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexSorting.java
@@ -2146,7 +2146,7 @@ public class TestIndexSorting extends LuceneTestCase {
     IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, () -> {
       iwc.setIndexSort(Sort.RELEVANCE);
     });
-    assertEquals("invalid SortField type: must be one of [STRING, INT, FLOAT, LONG, DOUBLE] but got: <score>", expected.getMessage());
+    assertEquals("Cannot sort index with sort field <score>", expected.getMessage());
   }
 
   // you can't change the index sort on an existing index:
@@ -2498,6 +2498,7 @@ public class TestIndexSorting extends LuceneTestCase {
         System.out.println("  float=" + docValues.floatValue);
         System.out.println("  double=" + docValues.doubleValue);
         System.out.println("  bytes=" + new BytesRef(docValues.bytesValue));
+        System.out.println("  mvf=" + Arrays.toString(docValues.floatValues));
       }
 
       Document doc = new Document();
@@ -2741,7 +2742,7 @@ public class TestIndexSorting extends LuceneTestCase {
         Document doc = new Document();
         doc.add(dvs.get(j));
         IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> w.addDocument(doc));
-        assertThat(exc.getMessage(), containsString("invalid doc value type"));
+        assertThat(exc.getMessage(), containsString("expected field [field] to be "));
         doc.clear();
         doc.add(dvs.get(i));
         w.addDocument(doc);
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
index ee778ed..d982953 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
@@ -396,7 +396,7 @@ public class TestPointValues extends LuceneTestCase {
   public void testDifferentCodecs1() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
-    iwc.setCodec(Codec.forName("Lucene84"));
+    iwc.setCodec(Codec.forName("Lucene86"));
     IndexWriter w = new IndexWriter(dir, iwc);
     Document doc = new Document();
     doc.add(new IntPoint("int", 1));
@@ -427,7 +427,7 @@ public class TestPointValues extends LuceneTestCase {
     w.close();
     
     iwc = new IndexWriterConfig(new MockAnalyzer(random()));
-    iwc.setCodec(Codec.forName("Lucene84"));
+    iwc.setCodec(Codec.forName("Lucene86"));
     w = new IndexWriter(dir, iwc);
     doc = new Document();
     doc.add(new IntPoint("int", 1));
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
index 2ccfd9a..3400f0e 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
@@ -96,7 +96,7 @@ public class TestBoolean2 extends LuceneTestCase {
 
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     // randomized codecs are sometimes too costly for this test:
-    iwc.setCodec(Codec.forName("Lucene84"));
+    iwc.setCodec(Codec.forName("Lucene86"));
     iwc.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter writer= new RandomIndexWriter(random(), directory, iwc);
     // we'll make a ton of docs, disable store/norms/vectors
@@ -141,7 +141,7 @@ public class TestBoolean2 extends LuceneTestCase {
     iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     // we need docID order to be preserved:
     // randomized codecs are sometimes too costly for this test:
-    iwc.setCodec(Codec.forName("Lucene84"));
+    iwc.setCodec(Codec.forName("Lucene86"));
     iwc.setMergePolicy(newLogMergePolicy());
     try (IndexWriter w = new IndexWriter(singleSegmentDirectory, iwc)) {
       w.forceMerge(1, true);
@@ -167,7 +167,7 @@ public class TestBoolean2 extends LuceneTestCase {
 
       iwc = newIndexWriterConfig(new MockAnalyzer(random()));
       // randomized codecs are sometimes too costly for this test:
-      iwc.setCodec(Codec.forName("Lucene84"));
+      iwc.setCodec(Codec.forName("Lucene86"));
       RandomIndexWriter w = new RandomIndexWriter(random(), dir2, iwc);
       w.addIndexes(copy);
       copy.close();
@@ -179,7 +179,7 @@ public class TestBoolean2 extends LuceneTestCase {
     iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000));
     // randomized codecs are sometimes too costly for this test:
-    iwc.setCodec(Codec.forName("Lucene84"));
+    iwc.setCodec(Codec.forName("Lucene86"));
     RandomIndexWriter w = new RandomIndexWriter(random(), dir2, iwc);
 
     doc = new Document();
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java
index b2d5b03..a14204c 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java
@@ -243,7 +243,7 @@ public class TestFloatPointNearestNeighbor extends LuceneTestCase {
 
   private IndexWriterConfig getIndexWriterConfig() {
     IndexWriterConfig iwc = newIndexWriterConfig();
-    iwc.setCodec(Codec.forName("Lucene84"));
+    iwc.setCodec(Codec.forName("Lucene86"));
     return iwc;
   }
 }
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java
index 3ea2a4e..a149ace 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java
@@ -246,7 +246,7 @@ public class TestNearest extends LuceneTestCase {
 
   private IndexWriterConfig getIndexWriterConfig() {
     IndexWriterConfig iwc = newIndexWriterConfig();
-    iwc.setCodec(Codec.forName("Lucene84"));
+    iwc.setCodec(Codec.forName("Lucene86"));
     return iwc;
   }
 }
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
index 9d80476..12c8902 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
@@ -16,9 +16,6 @@
  */
 package org.apache.lucene.search.suggest.document;
 
-import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
-import static org.hamcrest.core.IsEqual.equalTo;
-
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -32,6 +29,7 @@ import java.util.Set;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.CyclicBarrier;
 
+import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.TokenFilter;
@@ -41,7 +39,7 @@ import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
 import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.IntPoint;
@@ -69,7 +67,8 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.carrotsearch.randomizedtesting.generators.RandomPicks;
+import static org.apache.lucene.analysis.BaseTokenStreamTestCase.assertTokenStreamContents;
+import static org.hamcrest.core.IsEqual.equalTo;
 
 public class TestSuggestField extends LuceneTestCase {
 
@@ -888,7 +887,7 @@ public class TestSuggestField extends LuceneTestCase {
   static IndexWriterConfig iwcWithSuggestField(Analyzer analyzer, final Set<String> suggestFields) {
     IndexWriterConfig iwc = newIndexWriterConfig(random(), analyzer);
     iwc.setMergePolicy(newLogMergePolicy());
-    Codec filterCodec = new Lucene84Codec() {
+    Codec filterCodec = new Lucene86Codec() {
       CompletionPostingsFormat.FSTLoadMode fstLoadMode =
           RandomPicks.randomFrom(random(), CompletionPostingsFormat.FSTLoadMode.values());
       PostingsFormat postingsFormat = new Completion84PostingsFormat(fstLoadMode);
diff --git a/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java
index e67e2a7..135ff38 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java
@@ -1276,7 +1276,7 @@ public abstract class BaseGeoPointTestCase extends LuceneTestCase {
     // Else seeds may not reproduce:
     iwc.setMergeScheduler(new SerialMergeScheduler());
     int pointsInLeaf = 2 + random().nextInt(4);
-    iwc.setCodec(new FilterCodec("Lucene84", TestUtil.getDefaultCodec()) {
+    iwc.setCodec(new FilterCodec("Lucene86", TestUtil.getDefaultCodec()) {
       @Override
       public PointsFormat pointsFormat() {
         return new PointsFormat() {
diff --git a/lucene/test-framework/src/java/org/apache/lucene/geo/BaseXYPointTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/geo/BaseXYPointTestCase.java
index a597ca5..eb7be93 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/geo/BaseXYPointTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/geo/BaseXYPointTestCase.java
@@ -1190,7 +1190,7 @@ public abstract class BaseXYPointTestCase extends LuceneTestCase {
     // Else seeds may not reproduce:
     iwc.setMergeScheduler(new SerialMergeScheduler());
     int pointsInLeaf = 2 + random().nextInt(4);
-    iwc.setCodec(new FilterCodec("Lucene84", TestUtil.getDefaultCodec()) {
+    iwc.setCodec(new FilterCodec("Lucene86", TestUtil.getDefaultCodec()) {
       @Override
       public PointsFormat pointsFormat() {
         return new PointsFormat() {
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
index bd59e8c..aef11ac 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
@@ -16,17 +16,6 @@
  */
 package org.apache.lucene.util;
 
-import static org.apache.lucene.util.LuceneTestCase.INFOSTREAM;
-import static org.apache.lucene.util.LuceneTestCase.TEST_CODEC;
-import static org.apache.lucene.util.LuceneTestCase.TEST_DOCVALUESFORMAT;
-import static org.apache.lucene.util.LuceneTestCase.TEST_POSTINGSFORMAT;
-import static org.apache.lucene.util.LuceneTestCase.VERBOSE;
-import static org.apache.lucene.util.LuceneTestCase.assumeFalse;
-import static org.apache.lucene.util.LuceneTestCase.localeForLanguageTag;
-import static org.apache.lucene.util.LuceneTestCase.random;
-import static org.apache.lucene.util.LuceneTestCase.randomLocale;
-import static org.apache.lucene.util.LuceneTestCase.randomTimeZone;
-
 import java.io.PrintStream;
 import java.util.Arrays;
 import java.util.HashSet;
@@ -34,6 +23,8 @@ import java.util.Locale;
 import java.util.Random;
 import java.util.TimeZone;
 
+import com.carrotsearch.randomizedtesting.RandomizedContext;
+import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.codecs.PostingsFormat;
@@ -43,7 +34,7 @@ import org.apache.lucene.codecs.asserting.AssertingPostingsFormat;
 import org.apache.lucene.codecs.cheapbastard.CheapBastardCodec;
 import org.apache.lucene.codecs.compressing.CompressingCodec;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.apache.lucene.codecs.mockrandom.MockRandomPostingsFormat;
 import org.apache.lucene.codecs.simpletext.SimpleTextCodec;
 import org.apache.lucene.index.RandomCodec;
@@ -54,8 +45,16 @@ import org.apache.lucene.util.LuceneTestCase.LiveIWCFlushMode;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.junit.internal.AssumptionViolatedException;
 
-import com.carrotsearch.randomizedtesting.RandomizedContext;
-import com.carrotsearch.randomizedtesting.generators.RandomPicks;
+import static org.apache.lucene.util.LuceneTestCase.INFOSTREAM;
+import static org.apache.lucene.util.LuceneTestCase.TEST_CODEC;
+import static org.apache.lucene.util.LuceneTestCase.TEST_DOCVALUESFORMAT;
+import static org.apache.lucene.util.LuceneTestCase.TEST_POSTINGSFORMAT;
+import static org.apache.lucene.util.LuceneTestCase.VERBOSE;
+import static org.apache.lucene.util.LuceneTestCase.assumeFalse;
+import static org.apache.lucene.util.LuceneTestCase.localeForLanguageTag;
+import static org.apache.lucene.util.LuceneTestCase.random;
+import static org.apache.lucene.util.LuceneTestCase.randomLocale;
+import static org.apache.lucene.util.LuceneTestCase.randomTimeZone;
 
 /**
  * Setup and restore suite-level environment (fine grained junk that 
@@ -189,7 +188,7 @@ final class TestRuleSetupAndRestoreClassEnv extends AbstractBeforeAfterRule {
     } else if ("Compressing".equals(TEST_CODEC) || ("random".equals(TEST_CODEC) && randomVal == 6 && !shouldAvoidCodec("Compressing"))) {
       codec = CompressingCodec.randomInstance(random);
     } else if ("Lucene84".equals(TEST_CODEC) || ("random".equals(TEST_CODEC) && randomVal == 5 && !shouldAvoidCodec("Lucene84"))) {
-      codec = new Lucene84Codec(RandomPicks.randomFrom(random, Lucene50StoredFieldsFormat.Mode.values())
+      codec = new Lucene86Codec(RandomPicks.randomFrom(random, Lucene50StoredFieldsFormat.Mode.values())
       );
     } else if (!"random".equals(TEST_CODEC)) {
       codec = Codec.forName(TEST_CODEC);
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
index f0697f9..2dc9ead 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
@@ -53,8 +53,8 @@ import org.apache.lucene.codecs.asserting.AssertingCodec;
 import org.apache.lucene.codecs.blockterms.LuceneFixedGap;
 import org.apache.lucene.codecs.blocktreeords.BlockTreeOrdsPostingsFormat;
 import org.apache.lucene.codecs.lucene80.Lucene80DocValuesFormat;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
 import org.apache.lucene.codecs.lucene84.Lucene84PostingsFormat;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat;
 import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
 import org.apache.lucene.document.BinaryDocValuesField;
@@ -919,7 +919,7 @@ public final class TestUtil {
    * This may be different than {@link Codec#getDefault()} because that is randomized. 
    */
   public static Codec getDefaultCodec() {
-    return new Lucene84Codec();
+    return new Lucene86Codec();
   }
   
   /** 
diff --git a/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java b/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java
index fa34edd..8a591c8 100644
--- a/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java
@@ -24,7 +24,7 @@ import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
-import org.apache.lucene.codecs.lucene84.Lucene84Codec;
+import org.apache.lucene.codecs.lucene86.Lucene86Codec;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.util.NamedList;
@@ -91,7 +91,7 @@ public class SchemaCodecFactory extends CodecFactory implements SolrCoreAware {
       compressionMode = SOLR_DEFAULT_COMPRESSION_MODE;
       log.debug("Using default compressionMode: {}", compressionMode);
     }
-    codec = new Lucene84Codec(compressionMode) {
+    codec = new Lucene86Codec(compressionMode) {
       @Override
       public PostingsFormat getPostingsFormatForField(String field) {
         final SchemaField schemaField = core.getLatestSchema().getFieldOrNull(field);
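
The hunks above all apply the same rename, Lucene84Codec -> Lucene86Codec (and the matching "Lucene84" -> "Lucene86" SPI name), in tests and in Solr's SchemaCodecFactory. A minimal sketch of the two ways the codec ends up wired in on this branch, assuming the Lucene 8.6 codec classes are on the classpath; the field name and the per-field postings choice below are illustrative, not taken from the commit:

    import org.apache.lucene.codecs.Codec;
    import org.apache.lucene.codecs.PostingsFormat;
    import org.apache.lucene.codecs.lucene86.Lucene86Codec;
    import org.apache.lucene.index.IndexWriterConfig;

    class CodecUpgradeSketch {
      // Either look the codec up by its SPI name, or subclass it to route
      // a single field to a different postings format.
      static IndexWriterConfig lucene86Config(boolean perFieldOverride) {
        IndexWriterConfig iwc = new IndexWriterConfig();
        if (!perFieldOverride) {
          iwc.setCodec(Codec.forName("Lucene86"));
        } else {
          iwc.setCodec(new Lucene86Codec() {
            @Override
            public PostingsFormat getPostingsFormatForField(String field) {
              if ("suggest_field".equals(field)) {         // illustrative field name
                return PostingsFormat.forName("Lucene84"); // default postings format on this branch
              }
              return super.getPostingsFormatForField(field);
            }
          });
        }
        return iwc;
      }
    }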


[lucene-solr] 17/47: SOLR-14461: Replace commons-fileupload with Jetty (#1490)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 57456a9079e4d8ba062653eb4c6b6709ea09d252
Author: David Smiley <ds...@apache.org>
AuthorDate: Fri May 22 00:34:48 2020 -0400

    SOLR-14461: Replace commons-fileupload with Jetty (#1490)
---
 lucene/ivy-versions.properties                     |   1 -
 solr/CHANGES.txt                                   |   3 +
 solr/core/build.gradle                             |   1 -
 solr/core/ivy.xml                                  |   1 -
 .../apache/solr/servlet/SolrDispatchFilter.java    |  18 +-
 .../apache/solr/servlet/SolrRequestParsers.java    | 149 ++++++++-------
 .../apache/solr/util/SolrFileCleaningTracker.java  | 147 ---------------
 solr/licenses/commons-fileupload-1.3.3.jar.sha1    |   1 -
 solr/licenses/commons-fileupload-LICENSE-ASL.txt   | 202 ---------------------
 solr/licenses/commons-fileupload-NOTICE.txt        |   5 -
 versions.lock                                      |   3 +-
 versions.props                                     |   1 -
 12 files changed, 90 insertions(+), 442 deletions(-)
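
The substance of the change below: multipart request parsing now goes through the standard Servlet API (javax.servlet.http.Part), with Jetty doing the actual parsing, instead of commons-fileupload's ServletFileUpload/DiskFileItemFactory. The parser enables multipart handling per request by setting Jetty's Request.MULTIPART_CONFIG_ELEMENT attribute, iterates req.getParts(), treats parts without a submitted file name as form fields, wraps file parts as content streams, and SolrDispatchFilter later deletes any temp files via cleanupMultipartFiles(). A condensed sketch of that flow, with illustrative size limits and helper names (only the Jetty/Servlet calls are taken from the patch):

    import javax.servlet.MultipartConfigElement;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.Part;
    import java.nio.charset.StandardCharsets;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import org.eclipse.jetty.server.Request;

    class MultipartSketch {
      /** Reads form fields into the given map and returns file-upload parts. */
      static List<Part> readParts(HttpServletRequest req, Map<String, String> formFields) throws Exception {
        MultipartConfigElement config = new MultipartConfigElement(
            null,               // temp dir (null = container default)
            -1,                 // no per-file size limit
            2L * 1024 * 1024,   // max request size, e.g. 2 MB
            100 * 1024);        // spill parts to disk above 100 KB
        // Tell Jetty, per request, that multipart processing is wanted.
        req.setAttribute(Request.MULTIPART_CONFIG_ELEMENT, config);

        List<Part> uploads = new ArrayList<>();
        for (Part part : req.getParts()) {
          if (part.getSubmittedFileName() == null) {
            // A plain form field: read its value as a string.
            formFields.put(part.getName(),
                org.apache.commons.io.IOUtils.toString(part.getInputStream(), StandardCharsets.UTF_8));
          } else {
            // A file upload: hand the Part to the caller, which should call
            // part.delete() once the stream has been consumed.
            uploads.add(part);
          }
        }
        return uploads;
      }
    }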

diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index 0e0e65d..3fccbfb 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -55,7 +55,6 @@ com.sun.jersey.version = 1.19
 /commons-cli/commons-cli = 1.4
 /commons-codec/commons-codec = 1.13
 /commons-collections/commons-collections = 3.2.2
-/commons-fileupload/commons-fileupload = 1.3.3
 /commons-io/commons-io = 2.6
 /commons-logging/commons-logging = 1.1.3
 /de.l3s.boilerpipe/boilerpipe = 1.1.0
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 63193d7..2629407 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -233,6 +233,8 @@ Other Changes
 
 * SOLR-14173: Major redesign of the Solr Reference Guide (Cassandra Targett)
 
+* SOLR-14461: Replaced commons-fileupload dependency with using Jetty's facilities. (David Smiley)
+
 * SOLR-14466: Upgrade log4j2 to latest release (2.13.2) (Erick Erickson)
 
 * SOLR-11934: Visit Solr logging, it's too noisy. Note particularly that the messages for
@@ -244,6 +246,7 @@ Other Changes
   Erick Erickson)
 
 * SOLR-14482: Fix or suppress warnings in solr/search/facet (Erick Erickson)
+
 ==================  8.5.1 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/core/build.gradle b/solr/core/build.gradle
index 6e9a068..71002c2 100644
--- a/solr/core/build.gradle
+++ b/solr/core/build.gradle
@@ -75,7 +75,6 @@ dependencies {
   })
 
   implementation 'com.tdunning:t-digest'
-  implementation 'commons-fileupload:commons-fileupload'
 
   implementation 'io.opentracing:opentracing-api'
   implementation 'io.opentracing:opentracing-noop'
diff --git a/solr/core/ivy.xml b/solr/core/ivy.xml
index 19d74f3..c632c47 100644
--- a/solr/core/ivy.xml
+++ b/solr/core/ivy.xml
@@ -38,7 +38,6 @@
     <dependency org="commons-codec" name="commons-codec" rev="${/commons-codec/commons-codec}" conf="compile"/>
     <dependency org="commons-io" name="commons-io" rev="${/commons-io/commons-io}" conf="compile"/>
     <dependency org="org.apache.commons" name="commons-exec" rev="${/org.apache.commons/commons-exec}" conf="compile"/>
-    <dependency org="commons-fileupload" name="commons-fileupload" rev="${/commons-fileupload/commons-fileupload}" conf="compile"/>
     <dependency org="commons-cli" name="commons-cli" rev="${/commons-cli/commons-cli}" conf="compile"/>
     <dependency org="org.apache.commons" name="commons-text" rev="${/org.apache.commons/commons-text}" conf="compile"/>
     <dependency org="com.google.guava" name="guava" rev="${/com.google.guava/guava}" conf="compile"/>
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index eb2f74a..ae183fe 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -58,7 +58,6 @@ import io.opentracing.Span;
 import io.opentracing.SpanContext;
 import io.opentracing.Tracer;
 import io.opentracing.tag.Tags;
-import org.apache.commons.io.FileCleaningTracker;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.http.HttpHeaders;
 import org.apache.http.client.HttpClient;
@@ -83,7 +82,6 @@ import org.apache.solr.security.AuditEvent;
 import org.apache.solr.security.AuthenticationPlugin;
 import org.apache.solr.security.PKIAuthenticationPlugin;
 import org.apache.solr.security.PublicKeyHandler;
-import org.apache.solr.util.SolrFileCleaningTracker;
 import org.apache.solr.util.tracing.GlobalTracer;
 import org.apache.solr.util.StartupLoggingUtils;
 import org.apache.solr.util.configuration.SSLConfigurationsFactory;
@@ -152,8 +150,6 @@ public class SolrDispatchFilter extends BaseSolrFilter {
     CoreContainer coresInit = null;
     try{
 
-    SolrRequestParsers.fileCleaningTracker = new SolrFileCleaningTracker();
-
     StartupLoggingUtils.checkLogDir();
     if (log.isInfoEnabled()) {
       log.info("Using logger factory {}", StartupLoggingUtils.getLoggerImplStr());
@@ -324,19 +320,6 @@ public class SolrDispatchFilter extends BaseSolrFilter {
     CoreContainer cc = cores;
     cores = null;
     try {
-      try {
-        FileCleaningTracker fileCleaningTracker = SolrRequestParsers.fileCleaningTracker;
-        if (fileCleaningTracker != null) {
-          fileCleaningTracker.exitWhenFinished();
-        }
-      } catch (NullPointerException e) {
-        // okay
-      } catch (Exception e) {
-        log.warn("Exception closing FileCleaningTracker", e);
-      } finally {
-        SolrRequestParsers.fileCleaningTracker = null;
-      }
-
       if (metricManager != null) {
         try {
           metricManager.unregisterGauges(registryName, metricTag);
@@ -456,6 +439,7 @@ public class SolrDispatchFilter extends BaseSolrFilter {
 
       GlobalTracer.get().clearContext();
       consumeInputFully(request, response);
+      SolrRequestParsers.cleanupMultipartFiles(request);
     }
   }
   
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java b/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java
index c8e886a..067e97c 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java
@@ -16,11 +16,14 @@
  */
 package org.apache.solr.servlet;
 
+import javax.servlet.MultipartConfigElement;
 import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.Part;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
+import java.lang.invoke.MethodHandles;
 import java.net.URL;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
@@ -38,10 +41,6 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.fileupload.FileItem;
-import org.apache.commons.fileupload.disk.DiskFileItemFactory;
-import org.apache.commons.fileupload.servlet.ServletFileUpload;
-import org.apache.commons.io.FileCleaningTracker;
 import org.apache.commons.io.input.CloseShieldInputStream;
 import org.apache.lucene.util.IOUtils;
 import org.apache.solr.api.V2HttpCall;
@@ -60,14 +59,21 @@ import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequestBase;
 import org.apache.solr.util.RTimerTree;
-import org.apache.solr.util.SolrFileCleaningTracker;
 import org.apache.solr.util.tracing.GlobalTracer;
+import org.eclipse.jetty.http.HttpFields;
+import org.eclipse.jetty.http.MimeTypes;
+import org.eclipse.jetty.server.MultiParts;
+import org.eclipse.jetty.server.Request;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.common.params.CommonParams.PATH;
 
 
-public class SolrRequestParsers 
-{
+public class SolrRequestParsers {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
   // Should these constants be in a more public place?
   public static final String MULTIPART = "multipart";
   public static final String FORMDATA = "formdata";
@@ -92,9 +98,7 @@ public class SolrRequestParsers
 
   /** Default instance for e.g. admin requests. Limits to 2 MB uploads and does not allow remote streams. */
   public static final SolrRequestParsers DEFAULT = new SolrRequestParsers();
-  
-  public static volatile SolrFileCleaningTracker fileCleaningTracker;
-  
+
   /**
    * Pass in an xml configuration.  A null configuration will enable
    * everything with maximum values.
@@ -534,29 +538,6 @@ public class SolrRequestParsers
     }
   }
 
-
-  /**
-   * Wrap a FileItem as a ContentStream
-   */
-  static class FileItemContentStream extends ContentStreamBase
-  {
-    private final FileItem item;
-    
-    public FileItemContentStream( FileItem f )
-    {
-      item = f;
-      contentType = item.getContentType();
-      name = item.getName();
-      sourceInfo = item.getFieldName();
-      size = item.getSize();
-    }
-      
-    @Override
-    public InputStream getStream() throws IOException {
-      return item.getInputStream();
-    }
-  }
-
   /**
    * The raw parser just uses the params directly
    */
@@ -571,59 +552,99 @@ public class SolrRequestParsers
     }
   }
 
-
-
   /**
    * Extract Multipart streams
    */
   static class MultipartRequestParser implements SolrRequestParser {
-    private final int uploadLimitKB;
-    private DiskFileItemFactory factory = new DiskFileItemFactory();
-    
-    public MultipartRequestParser(int limit) {
-      uploadLimitKB = limit;
+    private final MultipartConfigElement multipartConfigElement;
 
-      // Set factory constraints
-      FileCleaningTracker fct = fileCleaningTracker;
-      if (fct != null) {
-        factory.setFileCleaningTracker(fileCleaningTracker);
-      }
-      // TODO - configure factory.setSizeThreshold(yourMaxMemorySize);
-      // TODO - configure factory.setRepository(yourTempDirectory);
+    public MultipartRequestParser(int uploadLimitKB) {
+      multipartConfigElement = new MultipartConfigElement(
+          null, // temp dir (null=default)
+          -1, // maxFileSize  (-1=none)
+          uploadLimitKB * 1024, // maxRequestSize
+          100 * 1024 ); // fileSizeThreshold after which will go to disk
     }
     
     @Override
     public SolrParams parseParamsAndFillStreams(
         final HttpServletRequest req, ArrayList<ContentStream> streams) throws Exception {
-      if( !ServletFileUpload.isMultipartContent(req) ) {
+      if (!isMultipart(req)) {
         throw new SolrException( ErrorCode.BAD_REQUEST, "Not multipart content! "+req.getContentType() );
       }
-      
+      // Magic way to tell Jetty dynamically we want multi-part processing.  "Request" here is a Jetty class
+      req.setAttribute(Request.MULTIPART_CONFIG_ELEMENT, multipartConfigElement);
+
       MultiMapSolrParams params = parseQueryString( req.getQueryString() );
 
-      // Create a new file upload handler
-      ServletFileUpload upload = new ServletFileUpload(factory);
-      upload.setSizeMax( ((long) uploadLimitKB) * 1024L );
+      // IMPORTANT: the Parts will all have the delete() method called by cleanupMultipartFiles()
 
-      // Parse the request
-      List<FileItem> items = upload.parseRequest(req);
-      for (FileItem item : items) {
-        // If it's a form field, put it in our parameter map
-        if (item.isFormField()) {
+      for (Part part : req.getParts()) {
+        if (part.getSubmittedFileName() == null) { // thus a form field and not file upload
+          // If it's a form field, put it in our parameter map
+          String partAsString = org.apache.commons.io.IOUtils.toString(new PartContentStream(part).getReader());
           MultiMapSolrParams.addParam(
-            item.getFieldName().trim(),
-            item.getString(), params.getMap() );
-        }
-        // Add the stream
-        else {
-          streams.add( new FileItemContentStream( item ) );
+              part.getName().trim(),
+              partAsString, params.getMap() );
+        } else { // file upload
+          streams.add(new PartContentStream(part));
         }
       }
       return params;
     }
+
+    boolean isMultipart(HttpServletRequest req) {
+      // Jetty utilities
+      return MimeTypes.Type.MULTIPART_FORM_DATA.is(HttpFields.valueParameters(req.getContentType(), null));
+    }
+
+    /** Wrap a MultiPart-{@link Part} as a {@link ContentStream} */
+    static class PartContentStream extends ContentStreamBase {
+      private final Part part;
+
+      public PartContentStream(Part part ) {
+        this.part = part;
+        contentType = part.getContentType();
+        name = part.getName();
+        sourceInfo = part.getSubmittedFileName();
+        size = part.getSize();
+      }
+
+      @Override
+      public InputStream getStream() throws IOException {
+        return part.getInputStream();
+      }
+    }
   }
 
 
+  /** Clean up any tmp files created by MultiPartInputStream. */
+  static void cleanupMultipartFiles(HttpServletRequest request) {
+    // See Jetty MultiPartCleanerListener from which we drew inspiration
+    MultiParts multiParts = (MultiParts) request.getAttribute(Request.MULTIPARTS);
+    if (multiParts == null || multiParts.getContext() != request.getServletContext()) {
+      return;
+    }
+
+    log.debug("Deleting multipart files");
+
+    Collection<Part> parts;
+    try {
+      parts = multiParts.getParts();
+    } catch (IOException e) {
+      log.warn("Errors deleting multipart tmp files", e);
+      return;
+    }
+
+    for (Part part : parts) {
+      try {
+        part.delete();
+      } catch (IOException e) {
+        log.warn("Errors deleting multipart tmp files", e);
+      }
+    }
+  }
+
   /**
    * Extract application/x-www-form-urlencoded form data for POST requests
    */
@@ -791,7 +812,7 @@ public class SolrRequestParsers
         return formdata.parseParamsAndFillStreams(req, streams, input);
       }
 
-      if (ServletFileUpload.isMultipartContent(req)) {
+      if (multipart.isMultipart(req)) {
         return multipart.parseParamsAndFillStreams(req, streams);
       }
 
diff --git a/solr/core/src/java/org/apache/solr/util/SolrFileCleaningTracker.java b/solr/core/src/java/org/apache/solr/util/SolrFileCleaningTracker.java
deleted file mode 100644
index 9c66f0f..0000000
--- a/solr/core/src/java/org/apache/solr/util/SolrFileCleaningTracker.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.util;
-
-import java.io.File;
-import java.lang.ref.PhantomReference;
-import java.lang.ref.ReferenceQueue;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-
-import org.apache.commons.io.FileCleaningTracker;
-import org.apache.commons.io.FileDeleteStrategy;
-
-public class SolrFileCleaningTracker extends FileCleaningTracker {
-
-  ReferenceQueue<Object> q = new ReferenceQueue<>();
-
-  final Collection<Tracker> trackers = Collections.synchronizedSet(new HashSet<Tracker>());
-
-  final List<String> deleteFailures = Collections.synchronizedList(new ArrayList<String>());
-
-  volatile boolean exitWhenFinished = false;
-
-  Thread reaper;
-
-  public void track(final File file, final Object marker) {
-    track(file, marker, null);
-  }
-
-  public void track(final File file, final Object marker, final FileDeleteStrategy deleteStrategy) {
-    if (file == null) {
-      throw new NullPointerException("The file must not be null");
-    }
-    addTracker(file.getPath(), marker, deleteStrategy);
-  }
-
-  public void track(final String path, final Object marker) {
-    track(path, marker, null);
-  }
-
-  public void track(final String path, final Object marker, final FileDeleteStrategy deleteStrategy) {
-    if (path == null) {
-      throw new NullPointerException("The path must not be null");
-    }
-    addTracker(path, marker, deleteStrategy);
-  }
-
-  private synchronized void addTracker(final String path, final Object marker,
-      final FileDeleteStrategy deleteStrategy) {
-    if (exitWhenFinished) {
-      throw new IllegalStateException("No new trackers can be added once exitWhenFinished() is called");
-    }
-    if (reaper == null) {
-      reaper = new Reaper();
-      reaper.start();
-    }
-    trackers.add(new Tracker(path, deleteStrategy, marker, q));
-  }
-
-  public int getTrackCount() {
-    return trackers.size();
-  }
-
-  public List<String> getDeleteFailures() {
-    return deleteFailures;
-  }
-
-  public synchronized void exitWhenFinished() {
-    // synchronized block protects reaper
-    exitWhenFinished = true;
-    if (reaper != null) {
-      synchronized (reaper) {
-        reaper.interrupt();
-        try {
-          reaper.join();
-        } catch (InterruptedException e) { 
-          Thread.currentThread().interrupt();
-        }
-      }
-    }
-  }
-
-  private final class Reaper extends Thread {
-    Reaper() {
-      super("MultiPart Upload Tmp File Reaper");
-      setDaemon(true);
-    }
-
-    @Override
-    public void run() {
-      while (exitWhenFinished == false || trackers.size() > 0) {
-        try {
-          // Wait for a tracker to remove.
-          final Tracker tracker = (Tracker) q.remove(); // cannot return null
-          trackers.remove(tracker);
-          if (!tracker.delete()) {
-            deleteFailures.add(tracker.getPath());
-          }
-          tracker.clear();
-        } catch (final InterruptedException e) {
-          Thread.currentThread().interrupt();
-          break;
-        }
-      }
-    }
-  }
-
-  private static final class Tracker extends PhantomReference<Object> {
-
-    private final String path;
-
-    private final FileDeleteStrategy deleteStrategy;
-
-    Tracker(final String path, final FileDeleteStrategy deleteStrategy, final Object marker,
-        final ReferenceQueue<? super Object> queue) {
-      super(marker, queue);
-      this.path = path;
-      this.deleteStrategy = deleteStrategy == null ? FileDeleteStrategy.NORMAL : deleteStrategy;
-    }
-
-    public String getPath() {
-      return path;
-    }
-
-    public boolean delete() {
-      return deleteStrategy.deleteQuietly(new File(path));
-    }
-  }
-
-}
\ No newline at end of file
diff --git a/solr/licenses/commons-fileupload-1.3.3.jar.sha1 b/solr/licenses/commons-fileupload-1.3.3.jar.sha1
deleted file mode 100644
index d27deb4..0000000
--- a/solr/licenses/commons-fileupload-1.3.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-04ff14d809195b711fd6bcc87e6777f886730ca1
diff --git a/solr/licenses/commons-fileupload-LICENSE-ASL.txt b/solr/licenses/commons-fileupload-LICENSE-ASL.txt
deleted file mode 100644
index d645695..0000000
--- a/solr/licenses/commons-fileupload-LICENSE-ASL.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/solr/licenses/commons-fileupload-NOTICE.txt b/solr/licenses/commons-fileupload-NOTICE.txt
deleted file mode 100644
index bec42c0..0000000
--- a/solr/licenses/commons-fileupload-NOTICE.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Apache Commons FileUpload
-Copyright 2002-2008 The Apache Software Foundation
-
-This product includes software developed by
-The Apache Software Foundation (http://www.apache.org/).
diff --git a/versions.lock b/versions.lock
index 31088b2..7b22ab6 100644
--- a/versions.lock
+++ b/versions.lock
@@ -49,8 +49,7 @@ com.vaadin.external.google:android-json:0.0.20131108.vaadin1 (1 constraints: 340
 commons-cli:commons-cli:1.4 (1 constraints: a9041e2c)
 commons-codec:commons-codec:1.13 (1 constraints: d904f430)
 commons-collections:commons-collections:3.2.2 (1 constraints: 09050236)
-commons-fileupload:commons-fileupload:1.3.3 (1 constraints: 0905fc35)
-commons-io:commons-io:2.6 (2 constraints: bf145380)
+commons-io:commons-io:2.6 (1 constraints: ac04232c)
 commons-logging:commons-logging:1.1.3 (2 constraints: c8149e7f)
 de.l3s.boilerpipe:boilerpipe:1.1.0 (1 constraints: 0405f335)
 io.dropwizard.metrics:metrics-core:4.1.5 (5 constraints: 2543e4c0)
diff --git a/versions.props b/versions.props
index ea2a0a4..6a1c142 100644
--- a/versions.props
+++ b/versions.props
@@ -28,7 +28,6 @@ commons-beanutils:commons-beanutils=1.9.3
 commons-cli:commons-cli=1.4
 commons-codec:commons-codec=1.13
 commons-collections:commons-collections=3.2.2
-commons-fileupload:commons-fileupload=1.3.3
 commons-io:commons-io=2.6
 commons-logging:commons-logging=1.1.3
 de.l3s.boilerpipe:boilerpipe=1.1.0


[lucene-solr] 12/47: LUCENE-9376: Fix or suppress 20 resource leak precommit warnings in lucene/search

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 17592d28a1daa70fe6158d07001ee02c7c2b0780
Author: Erick Erickson <Er...@gmail.com>
AuthorDate: Thu May 21 20:29:18 2020 -0400

    LUCENE-9376: Fix or suppress 20 resource leak precommit warnings in lucene/search
---
 lucene/CHANGES.txt                                 |  3 +++
 .../org/apache/lucene/search/TestFuzzyQuery.java   |  5 ++--
 .../apache/lucene/search/TestLRUQueryCache.java    | 17 ++++++++------
 .../lucene/search/TestSameScoresWithThreads.java   |  1 +
 .../apache/lucene/search/TestSearcherManager.java  |  1 +
 .../org/apache/lucene/search/TestTermQuery.java    |  7 +++++-
 .../search/uhighlight/UnifiedHighlighter.java      |  2 ++
 .../lucene/search/highlight/HighlighterTest.java   | 27 +++++++++++-----------
 .../lucene/search/highlight/TokenSourcesTest.java  |  2 ++
 .../highlight/custom/HighlightCustomQueryTest.java | 23 +++++++++---------
 .../lucene/search/TestTermAutomatonQuery.java      |  3 ++-
 .../suggest/analyzing/TestFreeTextSuggester.java   |  1 +
 .../suggest/analyzing/TestSuggestStopFilter.java   |  9 --------
 .../search/suggest/document/TestSuggestField.java  |  2 +-
 .../lucene/search/ShardSearchingTestBase.java      |  3 +--
 15 files changed, 59 insertions(+), 47 deletions(-)
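
Most of the fixes below take one of a few recurring shapes: close an IndexWriter as soon as its reader has been obtained, fold previously leaked Analyzer/TokenStream/LineFileDocs instances into try-with-resources or explicit close() calls, and batch the final cleanup through IOUtils.close(...). A minimal sketch of those shapes, with illustrative names (the class, directory choice, and seed below are not from the commit):

    import java.util.Random;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.MockAnalyzer;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.ByteBuffersDirectory;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.IOUtils;

    class ResourceLeakPatterns {
      static void example() throws Exception {
        Directory dir = new ByteBuffersDirectory();
        // Closeable test fixtures go into try-with-resources ...
        try (Analyzer analyzer = new MockAnalyzer(new Random(42))) {
          IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(analyzer));
          w.commit();
          DirectoryReader r = DirectoryReader.open(w);
          // ... the writer is closed once the (NRT) reader exists ...
          w.close();
          // ... and whatever remains is closed together at the end.
          IOUtils.close(r, dir);
        }
      }
    }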

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index ab88bcb..46c7063 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -286,6 +286,9 @@ Build
 
 * Upgrade forbiddenapis to version 3.0.  (Uwe Schindler)
 
+* LUCENE-9376: Fix or suppress 20 resource leak precommit warnings in lucene/search
+  (Andras Salamon via Erick Erickson)
+
 ======================= Lucene 8.5.1 =======================
 
 Bug Fixes
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java
index 99376e3..b188612 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java
@@ -411,7 +411,6 @@ public class TestFuzzyQuery extends LuceneTestCase {
   
   public void testGiga() throws Exception {
 
-    MockAnalyzer analyzer = new MockAnalyzer(random());
     Directory index = newDirectory();
     RandomIndexWriter w = new RandomIndexWriter(random(), index);
 
@@ -443,6 +442,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
     assertEquals(1, hits.length);
     assertEquals("Giga byte", searcher.doc(hits[0].doc).get("field"));
     r.close();
+    w.close();
     index.close();
   }
   
@@ -561,6 +561,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
       w.addDocument(doc);
     }
     DirectoryReader r = w.getReader();
+    w.close();
     //System.out.println("TEST: reader=" + r);
     IndexSearcher s = newSearcher(r);
     int iters = atLeast(200);
@@ -638,7 +639,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
       }
     }
     
-    IOUtils.close(r, w, dir);
+    IOUtils.close(r, dir);
   }
 
   private static class TermAndScore implements Comparable<TermAndScore> {
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
index 7993beb..ef02375 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
@@ -181,14 +181,17 @@ public class TestLRUQueryCache extends LuceneTestCase {
       thread.join();
     }
 
-    if (error.get() != null) {
-      throw error.get();
+    try {
+      if (error.get() != null) {
+        throw error.get();
+      }
+      queryCache.assertConsistent();
+    } finally {
+      mgr.close();
+      w.close();
+      dir.close();
+      queryCache.assertConsistent();
     }
-    queryCache.assertConsistent();
-    mgr.close();
-    w.close();
-    dir.close();
-    queryCache.assertConsistent();
   }
 
   public void testLRUEviction() throws Exception {
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java b/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java
index a615a6a..4b284dfd 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSameScoresWithThreads.java
@@ -119,6 +119,7 @@ public class TestSameScoresWithThreads extends LuceneTestCase {
         thread.join();
       }
     }
+    docs.close();
     r.close();
     dir.close();
   }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java b/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java
index b923866..1d8edcc 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestSearcherManager.java
@@ -310,6 +310,7 @@ public class TestSearcherManager extends ThreadedIndexingAndSearchingTestCase {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
         new MockAnalyzer(random())).setMergeScheduler(new ConcurrentMergeScheduler()));
+    @SuppressWarnings("resource")
     SearcherManager sm = new SearcherManager(writer, false, false, new SearcherFactory());
     writer.addDocument(new Document());
     writer.commit();
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java
index e460e26..65986d9 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTermQuery.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.CompositeReaderContext;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FilterDirectoryReader;
 import org.apache.lucene.index.FilterLeafReader;
@@ -47,9 +48,13 @@ public class TestTermQuery extends LuceneTestCase {
     QueryUtils.checkUnequal(
         new TermQuery(new Term("foo", "bar")),
         new TermQuery(new Term("foo", "baz")));
+    final CompositeReaderContext context;
+    try (MultiReader multiReader = new MultiReader()) {
+      context = multiReader.getContext();
+    }
     QueryUtils.checkEqual(
         new TermQuery(new Term("foo", "bar")),
-        new TermQuery(new Term("foo", "bar"), TermStates.build(new MultiReader().getContext(), new Term("foo", "bar"), true)));
+        new TermQuery(new Term("foo", "bar"), TermStates.build(context, new Term("foo", "bar"), true)));
   }
 
   public void testCreateWeightDoesNotSeekIfScoresAreNotNeeded() throws IOException {
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java b/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java
index 74de248..5d0dc94 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/uhighlight/UnifiedHighlighter.java
@@ -61,6 +61,7 @@ import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.spans.SpanQuery;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.InPlaceMergeSorter;
 
 /**
@@ -643,6 +644,7 @@ public class UnifiedHighlighter {
 
       batchDocIdx += fieldValsByDoc.size();
     }
+    IOUtils.close(indexReaderWithTermVecCache);
     assert docIdIter.docID() == DocIdSetIterator.NO_MORE_DOCS
         || docIdIter.nextDoc() == DocIdSetIterator.NO_MORE_DOCS;
 
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
index 0c3a0f6..2e70317 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
@@ -1362,24 +1362,25 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
       public void run() throws Exception {
         HashMap<String,String> synonyms = new HashMap<>();
         synonyms.put("football", "soccer,footie");
-        Analyzer analyzer = new SynonymAnalyzer(synonyms);
+        try (Analyzer analyzer = new SynonymAnalyzer(synonyms)) {
 
-        String s = "football-soccer in the euro 2004 footie competition";
+          String s = "football-soccer in the euro 2004 footie competition";
 
-        BooleanQuery.Builder query = new BooleanQuery.Builder();
-        query.add(new TermQuery(new Term("bookid", "football")), Occur.SHOULD);
-        query.add(new TermQuery(new Term("bookid", "soccer")), Occur.SHOULD);
-        query.add(new TermQuery(new Term("bookid", "footie")), Occur.SHOULD);
+          BooleanQuery.Builder query = new BooleanQuery.Builder();
+          query.add(new TermQuery(new Term("bookid", "football")), Occur.SHOULD);
+          query.add(new TermQuery(new Term("bookid", "soccer")), Occur.SHOULD);
+          query.add(new TermQuery(new Term("bookid", "footie")), Occur.SHOULD);
 
-        Highlighter highlighter = getHighlighter(query.build(), null, HighlighterTest.this);
+          Highlighter highlighter = getHighlighter(query.build(), null, HighlighterTest.this);
 
-        // Get 3 best fragments and separate with a "..."
-        TokenStream tokenStream = analyzer.tokenStream(null, s);
+          // Get 3 best fragments and separate with a "..."
+          TokenStream tokenStream = analyzer.tokenStream(null, s);
 
-        String result = highlighter.getBestFragments(tokenStream, s, 3, "...");
-        String expectedResult = "<B>football</B>-<B>soccer</B> in the euro 2004 <B>footie</B> competition";
-        assertTrue("overlapping analyzer should handle highlights OK, expected:" + expectedResult
-            + " actual:" + result, expectedResult.equals(result));
+          String result = highlighter.getBestFragments(tokenStream, s, 3, "...");
+          String expectedResult = "<B>football</B>-<B>soccer</B> in the euro 2004 <B>footie</B> competition";
+          assertTrue("overlapping analyzer should handle highlights OK, expected:" + expectedResult
+              + " actual:" + result, expectedResult.equals(result));
+        }
       }
 
     };
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java
index 825133c..30cf711 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java
@@ -391,6 +391,7 @@ public class TokenSourcesTest extends BaseTokenStreamTestCase {
       if (startOffsets[i] == startOffsets[i-1]) {
         if (VERBOSE)
           System.out.println("Skipping test because can't easily validate random token-stream is correct.");
+        rTokenStream.close();
         return;
       }
     }
@@ -438,6 +439,7 @@ public class TokenSourcesTest extends BaseTokenStreamTestCase {
 
     reader.close();
     dir.close();
+    rTokenStream.close();
   }
 
   public void testMaxStartOffsetConsistency() throws IOException {
diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java
index b8ce3dd..115a51a9 100644
--- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java
+++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/custom/HighlightCustomQueryTest.java
@@ -105,17 +105,18 @@ public class HighlightCustomQueryTest extends LuceneTestCase {
    */
   private String highlightField(Query query, String fieldName,
       String text) throws IOException, InvalidTokenOffsetsException {
-    TokenStream tokenStream = new MockAnalyzer(random(), MockTokenizer.SIMPLE,
-        true, MockTokenFilter.ENGLISH_STOPSET).tokenStream(fieldName, text);
-    // Assuming "<B>", "</B>" used to highlight
-    SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
-    MyQueryScorer scorer = new MyQueryScorer(query, fieldName, FIELD_NAME);
-    Highlighter highlighter = new Highlighter(formatter, scorer);
-    highlighter.setTextFragmenter(new SimpleFragmenter(Integer.MAX_VALUE));
-
-    String rv = highlighter.getBestFragments(tokenStream, text, 1,
-        "(FIELD TEXT TRUNCATED)");
-    return rv.length() == 0 ? text : rv;
+    try (MockAnalyzer mockAnalyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE,true,
+        MockTokenFilter.ENGLISH_STOPSET); TokenStream tokenStream = mockAnalyzer.tokenStream(fieldName, text)) {
+      // Assuming "<B>", "</B>" used to highlight
+      SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
+      MyQueryScorer scorer = new MyQueryScorer(query, fieldName, FIELD_NAME);
+      Highlighter highlighter = new Highlighter(formatter, scorer);
+      highlighter.setTextFragmenter(new SimpleFragmenter(Integer.MAX_VALUE));
+
+      String rv = highlighter.getBestFragments(tokenStream, text, 1,
+          "(FIELD TEXT TRUNCATED)");
+      return rv.length() == 0 ? text : rv;
+    }
   }
 
   public static class MyWeightedSpanTermExtractor extends
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
index 64fe4c7..a95f095 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java
@@ -444,6 +444,7 @@ public class TestTermAutomatonQuery extends LuceneTestCase {
     }
 
     IndexReader r = w.getReader();
+    w.close();
     IndexSearcher s = newSearcher(r);
 
     // Used to match ANY using MultiPhraseQuery:
@@ -561,7 +562,7 @@ public class TestTermAutomatonQuery extends LuceneTestCase {
       }
     }
 
-    IOUtils.close(w, r, dir, analyzer);
+    IOUtils.close(r, dir, analyzer);
   }
 
   private Set<String> toDocIDs(IndexSearcher s, TopDocs hits) throws IOException {
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java
index 3e89275..530a4c3 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestFreeTextSuggester.java
@@ -192,6 +192,7 @@ public class TestFreeTextSuggester extends LuceneTestCase {
       }
     }
     analyzer.close();
+    lfd.close();
   }
 
   // Make sure you can suggest based only on unigram model:
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java
index 5ed84e0..4dbccde 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/TestSuggestStopFilter.java
@@ -50,7 +50,6 @@ public class TestSuggestStopFilter extends BaseTokenStreamTestCase {
     Tokenizer stream = new MockTokenizer();
     stream.setReader(new StringReader("go to "));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
-    filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] {"go"},
                               new int[] {0},
@@ -69,8 +68,6 @@ public class TestSuggestStopFilter extends BaseTokenStreamTestCase {
     Tokenizer stream = new MockTokenizer();
     stream.setReader(new StringReader("go to school"));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
-
-    filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] {"go", "school"},
                               new int[] {0, 6},
@@ -89,8 +86,6 @@ public class TestSuggestStopFilter extends BaseTokenStreamTestCase {
     Tokenizer stream = new MockTokenizer();
     stream.setReader(new StringReader("go to a the school"));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
-
-    filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] { "go", "school" },
                               new int[] {0, 12},
@@ -109,8 +104,6 @@ public class TestSuggestStopFilter extends BaseTokenStreamTestCase {
     Tokenizer stream = new MockTokenizer();
     stream.setReader(new StringReader("go to a the"));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
-
-    filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] { "go", "the"},
                               new int[] {0, 8},
@@ -129,8 +122,6 @@ public class TestSuggestStopFilter extends BaseTokenStreamTestCase {
     Tokenizer stream = new MockTokenizer();
     stream.setReader(new StringReader("go to a the "));
     TokenStream filter = new SuggestStopFilter(stream, stopWords);
-
-    filter = new SuggestStopFilter(stream, stopWords);
     assertTokenStreamContents(filter,
                               new String[] { "go"},
                               new int[] {0},
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
index 1dbadc1..9d80476 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
@@ -761,7 +761,7 @@ public class TestSuggestField extends LuceneTestCase {
       }
       assertTrue("at least one of the entries should have the score", matched);
     }
-
+    lineFileDocs.close();
     reader.close();
     iw.close();
   }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java b/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java
index 4f01cf7..a8f1b7d 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java
@@ -550,8 +550,7 @@ public abstract class ShardSearchingTestBase extends LuceneTestCase {
   private final class ChangeIndices extends Thread {
     @Override
     public void run() {
-      try {
-        final LineFileDocs docs = new LineFileDocs(random());
+      try (final LineFileDocs docs = new LineFileDocs(random())) {
         int numDocs = 0;
         while (System.nanoTime() < endTimeNanos) {
           final int what = random().nextInt(3);
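
Every hunk in this patch follows the same pattern: a test resource that was
previously left open (a MockAnalyzer/TokenStream, LineFileDocs, or IndexWriter)
is now released, either with an explicit close() or with try-with-resources.
Below is a minimal sketch of the idiom, with a hypothetical test name and
assuming the usual LuceneTestCase helpers (random(), assertNotNull) used by the
tests above, rather than a copy of any single one of them:

    // Minimal sketch, not actual test code from this patch.
    public void testResourceIsAlwaysClosed() throws Exception {
      // Open the resource in try-with-resources so it is closed on every path.
      try (LineFileDocs docs = new LineFileDocs(random())) {
        assertNotNull(docs.nextDoc());  // use the resource inside the try block
      }                                 // docs.close() runs even if the assertion fails
    }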


[lucene-solr] 25/47: SOLR-14474: Fix remaining auxiliary class warnings in Solr

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 09fa2a183c11c23f7dc1b1428c88faa5de3cdce8
Author: Erick Erickson <Er...@gmail.com>
AuthorDate: Wed May 27 12:06:29 2020 -0400

    SOLR-14474: Fix remaining auxiliary class warnings in Solr
---
 solr/CHANGES.txt                                   |   2 +
 .../org/apache/solr/cloud/ElectionContext.java     | 709 ---------------------
 .../apache/solr/cloud/OverseerElectionContext.java | 110 ++++
 ...ontext.java => ShardLeaderElectionContext.java} | 423 ++----------
 .../solr/cloud/ShardLeaderElectionContextBase.java | 194 ++++++
 .../handler/component/PivotFacetProcessor.java     |   4 +-
 .../solr/handler/component/StatsComponent.java     | 110 +---
 .../apache/solr/handler/component/StatsInfo.java   | 108 ++++
 .../export/{DoubleCmp.java => DoubleComp.java}     |  30 +-
 .../apache/solr/handler/export/ExportWriter.java   |  28 +-
 .../export/{FloatCmp.java => FloatComp.java}       |  30 +-
 .../org/apache/solr/handler/export/IntComp.java    |  32 +-
 .../handler/export/{LongCmp.java => LongComp.java} |  31 +-
 .../response/transform/ShardAugmenterFactory.java  |   2 +-
 .../response/transform/ValueAugmenterFactory.java  |  78 ++-
 .../org/apache/solr/search/facet/FacetParser.java  | 235 ++++++-
 .../org/apache/solr/search/facet/FacetRequest.java | 223 +------
 ...TermsCollector.java => GraphEdgeCollector.java} | 149 ++---
 .../org/apache/solr/search/join/GraphQuery.java    |   2 +-
 .../org/apache/solr/update/TransactionLog.java     | 160 ++---
 .../processor/DistributedZkUpdateProcessor.java    |   2 +-
 .../processor/RunUpdateProcessorFactory.java       | 123 ++--
 .../solr/search/facet/TestJsonFacetRefinement.java |   2 +-
 .../UpdateRequestProcessorFactoryTest.java         |   2 +-
 solr/solrj/src/java/org/noggit/CharArr.java        | 262 ++++----
 solr/solrj/src/java/org/noggit/JSONParser.java     |   2 +-
 26 files changed, 1190 insertions(+), 1863 deletions(-)
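
For readers unfamiliar with the warning this commit targets: javac's
-Xlint:auxiliaryclass option flags an "auxiliary" class, that is, an extra
top-level class declared in a source file whose name does not match it, since
such a class can only be found by compiling that other file. The fix applied
below to ElectionContext.java is the standard one: move each auxiliary class
into its own file. The fragment here is only an illustrative sketch with
hypothetical Foo/Bar names, not Solr code:

    // Before: Foo.java declares a second top-level class, Bar.
    // Code elsewhere that references Bar trips the auxiliaryclass lint warning.
    public class Foo { /* ... */ }
    class Bar { /* ... */ }   // auxiliary: declared in Foo.java, not Bar.java

    // After: one top-level class per file, and the warning goes away.
    // Foo.java
    public class Foo { /* ... */ }
    // Bar.java
    class Bar { /* ... */ }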

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 0e09e49..d163d11 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -263,6 +263,8 @@ Other Changes
 
 * SOLR-14280: Improve error reporting in SolrConfig (Andras Salamon via Jason Gerlowski)
 
+* SOLR-14474: Fix remaining auxiliary class warnings in Solr (Erick Erickson)
+
 ==================  8.5.1 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
index 9ba4900..1398570 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
@@ -19,50 +19,13 @@ package org.apache.solr.cloud;
 import java.io.Closeable;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.solr.cloud.overseer.OverseerAction;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.ZkCmdExecutor;
-import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkNodeProps;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.util.RetryUtil;
-import org.apache.solr.common.util.Utils;
-import org.apache.solr.core.CoreContainer;
-import org.apache.solr.core.SolrCore;
-import org.apache.solr.logging.MDCLoggingContext;
-import org.apache.solr.search.SolrIndexSearcher;
-import org.apache.solr.update.PeerSync;
-import org.apache.solr.update.UpdateLog;
-import org.apache.solr.util.RefCounted;
-import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NoNodeException;
-import org.apache.zookeeper.KeeperException.NodeExistsException;
-import org.apache.zookeeper.KeeperException.SessionExpiredException;
-import org.apache.zookeeper.Op;
-import org.apache.zookeeper.OpResult;
-import org.apache.zookeeper.OpResult.SetDataResult;
-import org.apache.zookeeper.ZooDefs;
-import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.common.params.CommonParams.ID;
-
 public abstract class ElectionContext implements Closeable {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   final String electionPath;
@@ -111,676 +74,4 @@ public abstract class ElectionContext implements Closeable {
   }
 }
 
-class ShardLeaderElectionContextBase extends ElectionContext {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  protected final SolrZkClient zkClient;
-  protected String shardId;
-  protected String collection;
-  protected LeaderElector leaderElector;
-  protected ZkStateReader zkStateReader;
-  protected ZkController zkController;
-  private Integer leaderZkNodeParentVersion;
-
-  // Prevents a race between cancelling and becoming leader.
-  private final Object lock = new Object();
-
-  public ShardLeaderElectionContextBase(LeaderElector leaderElector,
-      final String shardId, final String collection, final String coreNodeName,
-      ZkNodeProps props, ZkController zkController) {
-    super(coreNodeName, ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection
-        + "/leader_elect/" + shardId, ZkStateReader.getShardLeadersPath(
-        collection, shardId), props, zkController.getZkClient());
-    this.leaderElector = leaderElector;
-    this.zkStateReader = zkController.getZkStateReader();
-    this.zkClient = zkStateReader.getZkClient();
-    this.zkController = zkController;
-    this.shardId = shardId;
-    this.collection = collection;
-    
-    String parent = new Path(leaderPath).getParent().toString();
-    ZkCmdExecutor zcmd = new ZkCmdExecutor(30000);
-    // only if /collections/{collection} exists already do we succeed in creating this path
-    log.info("make sure parent is created {}", parent);
-    try {
-      zcmd.ensureExists(parent, (byte[])null, CreateMode.PERSISTENT, zkClient, 2);
-    } catch (KeeperException e) {
-      throw new RuntimeException(e);
-    } catch (InterruptedException e) {
-      Thread.currentThread().interrupt();
-      throw new RuntimeException(e);
-    }
-  }
-  
-  @Override
-  public void cancelElection() throws InterruptedException, KeeperException {
-    super.cancelElection();
-    synchronized (lock) {
-      if (leaderZkNodeParentVersion != null) {
-        try {
-          // We need to be careful and make sure we *only* delete our own leader registration node.
-          // We do this by using a multi and ensuring the parent znode of the leader registration node
-          // matches the version we expect - there is a setData call that increments the parent's znode
-          // version whenever a leader registers.
-          log.debug("Removing leader registration node on cancel: {} {}", leaderPath, leaderZkNodeParentVersion);
-          List<Op> ops = new ArrayList<>(2);
-          ops.add(Op.check(new Path(leaderPath).getParent().toString(), leaderZkNodeParentVersion));
-          ops.add(Op.delete(leaderPath, -1));
-          zkClient.multi(ops, true);
-        } catch (KeeperException.NoNodeException nne) {
-          // no problem
-          log.debug("No leader registration node found to remove: {}", leaderPath);
-        } catch (KeeperException.BadVersionException bve) {
-          log.info("Cannot remove leader registration node because the current registered node is not ours: {}", leaderPath);
-          // no problem
-        } catch (InterruptedException e) {
-          throw e;
-        } catch (Exception e) {
-          SolrException.log(log, e);
-        }
-        leaderZkNodeParentVersion = null;
-      } else {
-        log.info("No version found for ephemeral leader parent node, won't remove previous leader registration.");
-      }
-    }
-  }
-  
-  @Override
-  void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs)
-      throws KeeperException, InterruptedException, IOException {
-    // register as leader - if an ephemeral is already there, wait to see if it goes away
-
-    String parent = new Path(leaderPath).getParent().toString();
-    try {
-      RetryUtil.retryOnThrowable(NodeExistsException.class, 60000, 5000, () -> {
-        synchronized (lock) {
-          log.info("Creating leader registration node {} after winning as {}", leaderPath, leaderSeqPath);
-          List<Op> ops = new ArrayList<>(2);
-
-          // We use a multi operation to get the parent nodes version, which will
-          // be used to make sure we only remove our own leader registration node.
-          // The setData call used to get the parent version is also the trigger to
-          // increment the version. We also do a sanity check that our leaderSeqPath exists.
-
-          ops.add(Op.check(leaderSeqPath, -1));
-          ops.add(Op.create(leaderPath, Utils.toJSON(leaderProps), zkClient.getZkACLProvider().getACLsToAdd(leaderPath), CreateMode.EPHEMERAL));
-          ops.add(Op.setData(parent, null, -1));
-          List<OpResult> results;
-
-          results = zkClient.multi(ops, true);
-          for (OpResult result : results) {
-            if (result.getType() == ZooDefs.OpCode.setData) {
-              SetDataResult dresult = (SetDataResult) result;
-              Stat stat = dresult.getStat();
-              leaderZkNodeParentVersion = stat.getVersion();
-              return;
-            }
-          }
-          assert leaderZkNodeParentVersion != null;
-        }
-      });
-    } catch (NoNodeException e) {
-      log.info("Will not register as leader because it seems the election is no longer taking place.");
-      return;
-    } catch (Throwable t) {
-      if (t instanceof OutOfMemoryError) {
-        throw (OutOfMemoryError) t;
-      }
-      throw new SolrException(ErrorCode.SERVER_ERROR, "Could not register as the leader because creating the ephemeral registration node in ZooKeeper failed", t);
-    } 
-    
-    assert shardId != null;
-    boolean isAlreadyLeader = false;
-    if (zkStateReader.getClusterState() != null &&
-        zkStateReader.getClusterState().getCollection(collection).getSlice(shardId).getReplicas().size() < 2) {
-      Replica leader = zkStateReader.getLeader(collection, shardId);
-      if (leader != null
-          && leader.getBaseUrl().equals(leaderProps.get(ZkStateReader.BASE_URL_PROP))
-          && leader.getCoreName().equals(leaderProps.get(ZkStateReader.CORE_NAME_PROP))) {
-        isAlreadyLeader = true;
-      }
-    }
-    if (!isAlreadyLeader) {
-      ZkNodeProps m = ZkNodeProps.fromKeyVals(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(),
-          ZkStateReader.SHARD_ID_PROP, shardId,
-          ZkStateReader.COLLECTION_PROP, collection,
-          ZkStateReader.BASE_URL_PROP, leaderProps.get(ZkStateReader.BASE_URL_PROP),
-          ZkStateReader.CORE_NAME_PROP, leaderProps.get(ZkStateReader.CORE_NAME_PROP),
-          ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString());
-      assert zkController != null;
-      assert zkController.getOverseer() != null;
-      zkController.getOverseer().offerStateUpdate(Utils.toJSON(m));
-    }
-  }
-
-  public LeaderElector getLeaderElector() {
-    return leaderElector;
-  }
-
-  Integer getLeaderZkNodeParentVersion() {
-    synchronized (lock) {
-      return leaderZkNodeParentVersion;
-    }
-  }
-}
-
-// add core container and stop passing core around...
-final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  
-  private final CoreContainer cc;
-  private final SyncStrategy syncStrategy;
-
-  private volatile boolean isClosed = false;
-  
-  public ShardLeaderElectionContext(LeaderElector leaderElector, 
-      final String shardId, final String collection,
-      final String coreNodeName, ZkNodeProps props, ZkController zkController, CoreContainer cc) {
-    super(leaderElector, shardId, collection, coreNodeName, props,
-        zkController);
-    this.cc = cc;
-    syncStrategy = new SyncStrategy(cc);
-  }
-  
-  @Override
-  public void close() {
-    super.close();
-    this.isClosed  = true;
-    syncStrategy.close();
-  }
-  
-  @Override
-  public void cancelElection() throws InterruptedException, KeeperException {
-    String coreName = leaderProps.getStr(ZkStateReader.CORE_NAME_PROP);
-    try (SolrCore core = cc.getCore(coreName)) {
-      if (core != null) {
-        core.getCoreDescriptor().getCloudDescriptor().setLeader(false);
-      }
-    }
-    
-    super.cancelElection();
-  }
-  
-  @Override
-  public ElectionContext copy() {
-    return new ShardLeaderElectionContext(leaderElector, shardId, collection, id, leaderProps, zkController, cc);
-  }
-  
-  /* 
-   * weAreReplacement: has someone else been the leader already?
-   */
-  @Override
-  void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStart) throws KeeperException,
- InterruptedException, IOException {
-    String coreName = leaderProps.getStr(ZkStateReader.CORE_NAME_PROP);
-    ActionThrottle lt;
-    try (SolrCore core = cc.getCore(coreName)) {
-      if (core == null ) {
-        // shutdown or removed
-        return;
-      }
-      MDCLoggingContext.setCore(core);
-      lt = core.getUpdateHandler().getSolrCoreState().getLeaderThrottle();
-    }
-
-    try {
-      lt.minimumWaitBetweenActions();
-      lt.markAttemptingAction();
-      
-      
-      int leaderVoteWait = cc.getZkController().getLeaderVoteWait();
-      
-      log.debug("Running the leader process for shard={} and weAreReplacement={} and leaderVoteWait={}", shardId, weAreReplacement, leaderVoteWait);
-      if (zkController.getClusterState().getCollection(collection).getSlice(shardId).getReplicas().size() > 1) {
-        // Clear the leader in clusterstate. We only need to worry about this if there is actually more than one replica.
-        ZkNodeProps m = new ZkNodeProps(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(),
-            ZkStateReader.SHARD_ID_PROP, shardId, ZkStateReader.COLLECTION_PROP, collection);
-        zkController.getOverseer().getStateUpdateQueue().offer(Utils.toJSON(m));
-      }
-
-      boolean allReplicasInLine = false;
-      if (!weAreReplacement) {
-        allReplicasInLine = waitForReplicasToComeUp(leaderVoteWait);
-      } else {
-        allReplicasInLine = areAllReplicasParticipating();
-      }
-      
-      if (isClosed) {
-        // Solr is shutting down or the ZooKeeper session expired while waiting for replicas. If the later, 
-        // we cannot be sure we are still the leader, so we should bail out. The OnReconnect handler will 
-        // re-register the cores and handle a new leadership election.
-        return;
-      }
-      
-      Replica.Type replicaType;
-      String coreNodeName;
-      boolean setTermToMax = false;
-      try (SolrCore core = cc.getCore(coreName)) {
-        
-        if (core == null) {
-          return;
-        }
-        
-        replicaType = core.getCoreDescriptor().getCloudDescriptor().getReplicaType();
-        coreNodeName = core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName();
-        // should I be leader?
-        ZkShardTerms zkShardTerms = zkController.getShardTerms(collection, shardId);
-        if (zkShardTerms.registered(coreNodeName) && !zkShardTerms.canBecomeLeader(coreNodeName)) {
-          if (!waitForEligibleBecomeLeaderAfterTimeout(zkShardTerms, coreNodeName, leaderVoteWait)) {
-            rejoinLeaderElection(core);
-            return;
-          } else {
-            // only log an error if this replica win the election
-            setTermToMax = true;
-          }
-        }
-
-        if (isClosed) {
-          return;
-        }
-        
-        log.info("I may be the new leader - try and sync");
-        
-        // we are going to attempt to be the leader
-        // first cancel any current recovery
-        core.getUpdateHandler().getSolrCoreState().cancelRecovery();
-        
-        if (weAreReplacement) {
-          // wait a moment for any floating updates to finish
-          try {
-            Thread.sleep(2500);
-          } catch (InterruptedException e) {
-            Thread.currentThread().interrupt();
-            throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, e);
-          }
-        }
-
-        PeerSync.PeerSyncResult result = null;
-        boolean success = false;
-        try {
-          result = syncStrategy.sync(zkController, core, leaderProps, weAreReplacement);
-          success = result.isSuccess();
-        } catch (Exception e) {
-          SolrException.log(log, "Exception while trying to sync", e);
-          result = PeerSync.PeerSyncResult.failure();
-        }
-        
-        UpdateLog ulog = core.getUpdateHandler().getUpdateLog();
-        
-        if (!success) {
-          boolean hasRecentUpdates = false;
-          if (ulog != null) {
-            // TODO: we could optimize this if necessary
-            try (UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates()) {
-              hasRecentUpdates = !recentUpdates.getVersions(1).isEmpty();
-            }
-          }
-          
-          if (!hasRecentUpdates) {
-            // we failed sync, but we have no versions - we can't sync in that case
-            // - we were active
-            // before, so become leader anyway if no one else has any versions either
-            if (result.getOtherHasVersions().orElse(false))  {
-              log.info("We failed sync, but we have no versions - we can't sync in that case. But others have some versions, so we should not become leader");
-              success = false;
-            } else  {
-              log.info(
-                  "We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway");
-              success = true;
-            }
-          }
-        }
-        
-        // solrcloud_debug
-        if (log.isDebugEnabled()) {
-          try {
-            RefCounted<SolrIndexSearcher> searchHolder = core.getNewestSearcher(false);
-            SolrIndexSearcher searcher = searchHolder.get();
-            try {
-              if (log.isDebugEnabled()) {
-                log.debug("{} synched {}", core.getCoreContainer().getZkController().getNodeName()
-                    , searcher.count(new MatchAllDocsQuery()));
-              }
-            } finally {
-              searchHolder.decref();
-            }
-          } catch (Exception e) {
-            log.error("Error in solrcloud_debug block", e);
-          }
-        }
-        if (!success) {
-          rejoinLeaderElection(core);
-          return;
-        }
-        
-      }
-      
-      boolean isLeader = true;
-      if (!isClosed) {
-        try {
-          if (replicaType == Replica.Type.TLOG) {
-            // stop replicate from old leader
-            zkController.stopReplicationFromLeader(coreName);
-            if (weAreReplacement) {
-              try (SolrCore core = cc.getCore(coreName)) {
-                Future<UpdateLog.RecoveryInfo> future = core.getUpdateHandler().getUpdateLog().recoverFromCurrentLog();
-                if (future != null) {
-                  log.info("Replaying tlog before become new leader");
-                  future.get();
-                } else {
-                  log.info("New leader does not have old tlog to replay");
-                }
-              }
-            }
-          }
-          // in case of leaderVoteWait timeout, a replica with lower term can win the election
-          if (setTermToMax) {
-            log.error("WARNING: Potential data loss -- Replica {} became leader after timeout (leaderVoteWait) {}"
-                , "without being up-to-date with the previous leader", coreNodeName);
-            zkController.getShardTerms(collection, shardId).setTermEqualsToLeader(coreNodeName);
-          }
-          super.runLeaderProcess(weAreReplacement, 0);
-          try (SolrCore core = cc.getCore(coreName)) {
-            if (core != null) {
-              core.getCoreDescriptor().getCloudDescriptor().setLeader(true);
-              publishActiveIfRegisteredAndNotActive(core);
-            } else {
-              return;
-            }
-          }
-          if (log.isInfoEnabled()) {
-            log.info("I am the new leader: {} {}", ZkCoreNodeProps.getCoreUrl(leaderProps), shardId);
-          }
-          
-          // we made it as leader - send any recovery requests we need to
-          syncStrategy.requestRecoveries();
-
-        } catch (SessionExpiredException e) {
-          throw new SolrException(ErrorCode.SERVER_ERROR,
-              "ZK session expired - cancelling election for " + collection + " " + shardId);
-        } catch (Exception e) {
-          isLeader = false;
-          SolrException.log(log, "There was a problem trying to register as the leader", e);
-          
-          try (SolrCore core = cc.getCore(coreName)) {
-            
-            if (core == null) {
-              if (log.isDebugEnabled()) {
-                log.debug("SolrCore not found: {} in {}", coreName, cc.getLoadedCoreNames());
-              }
-              return;
-            }
-            
-            core.getCoreDescriptor().getCloudDescriptor().setLeader(false);
-            
-            // we could not publish ourselves as leader - try and rejoin election
-            try {
-              rejoinLeaderElection(core);
-            } catch (SessionExpiredException exc) {
-              throw new SolrException(ErrorCode.SERVER_ERROR,
-                  "ZK session expired - cancelling election for " + collection + " " + shardId);
-            }
-          }
-        }
-      } else {
-        cancelElection();
-      }
-    } finally {
-      MDCLoggingContext.clear();
-    }
-  }
-
-  /**
-   * Wait for other replicas with higher terms participate in the electioon
-   * @return true if after {@code timeout} there are no other replicas with higher term participate in the election,
-   * false if otherwise
-   */
-  private boolean waitForEligibleBecomeLeaderAfterTimeout(ZkShardTerms zkShardTerms, String coreNodeName, int timeout) throws InterruptedException {
-    long timeoutAt = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeout, TimeUnit.MILLISECONDS);
-    while (!isClosed && !cc.isShutDown()) {
-      if (System.nanoTime() > timeoutAt) {
-        log.warn("After waiting for {}ms, no other potential leader was found, {} try to become leader anyway (core_term:{}, highest_term:{})",
-            timeout, coreNodeName, zkShardTerms.getTerm(coreNodeName), zkShardTerms.getHighestTerm());
-        return true;
-      }
-      if (replicasWithHigherTermParticipated(zkShardTerms, coreNodeName)) {
-        log.info("Can't become leader, other replicas with higher term participated in leader election");
-        return false;
-      }
-      Thread.sleep(500L);
-    }
-    return false;
-  }
-
-  /**
-   * Do other replicas with higher term participated in the election
-   * @return true if other replicas with higher term participated in the election, false if otherwise
-   */
-  private boolean replicasWithHigherTermParticipated(ZkShardTerms zkShardTerms, String coreNodeName) {
-    ClusterState clusterState = zkController.getClusterState();
-    DocCollection docCollection = clusterState.getCollectionOrNull(collection);
-    Slice slices = (docCollection == null) ? null : docCollection.getSlice(shardId);
-    if (slices == null) return false;
-
-    long replicaTerm = zkShardTerms.getTerm(coreNodeName);
-    boolean isRecovering = zkShardTerms.isRecovering(coreNodeName);
-
-    for (Replica replica : slices.getReplicas()) {
-      if (replica.getName().equals(coreNodeName)) continue;
-
-      if (clusterState.getLiveNodes().contains(replica.getNodeName())) {
-        long otherTerm = zkShardTerms.getTerm(replica.getName());
-        boolean isOtherReplicaRecovering = zkShardTerms.isRecovering(replica.getName());
-
-        if (isRecovering && !isOtherReplicaRecovering) return true;
-        if (otherTerm > replicaTerm) return true;
-      }
-    }
-    return false;
-  }
-
-  public void publishActiveIfRegisteredAndNotActive(SolrCore core) throws Exception {
-      if (core.getCoreDescriptor().getCloudDescriptor().hasRegistered()) {
-        ZkStateReader zkStateReader = zkController.getZkStateReader();
-        zkStateReader.forceUpdateCollection(collection);
-        ClusterState clusterState = zkStateReader.getClusterState();
-        Replica rep = getReplica(clusterState, collection, leaderProps.getStr(ZkStateReader.CORE_NODE_NAME_PROP));
-        if (rep == null) return;
-        if (rep.getState() != Replica.State.ACTIVE || core.getCoreDescriptor().getCloudDescriptor().getLastPublished() != Replica.State.ACTIVE) {
-          log.debug("We have become the leader after core registration but are not in an ACTIVE state - publishing ACTIVE");
-          zkController.publish(core.getCoreDescriptor(), Replica.State.ACTIVE);
-        }
-      }
-  }
-  
-  private Replica getReplica(ClusterState clusterState, String collectionName, String replicaName) {
-    if (clusterState == null) return null;
-    final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName);
-    if (docCollection == null) return null;
-    return docCollection.getReplica(replicaName);
-  }
-
-  // returns true if all replicas are found to be up, false if not
-  private boolean waitForReplicasToComeUp(int timeoutms) throws InterruptedException {
-    long timeoutAt = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeoutms, TimeUnit.MILLISECONDS);
-    final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE;
-    
-    DocCollection docCollection = zkController.getClusterState().getCollectionOrNull(collection);
-    Slice slices = (docCollection == null) ? null : docCollection.getSlice(shardId);
-    int cnt = 0;
-    while (!isClosed && !cc.isShutDown()) {
-      // wait for everyone to be up
-      if (slices != null) {
-        int found = 0;
-        try {
-          found = zkClient.getChildren(shardsElectZkPath, null, true).size();
-        } catch (KeeperException e) {
-          if (e instanceof KeeperException.SessionExpiredException) {
-            // if the session has expired, then another election will be launched, so
-            // quit here
-            throw new SolrException(ErrorCode.SERVER_ERROR,
-                                    "ZK session expired - cancelling election for " + collection + " " + shardId);
-          }
-          SolrException.log(log,
-              "Error checking for the number of election participants", e);
-        }
-        
-        // on startup and after connection timeout, wait for all known shards
-        if (found >= slices.getReplicas(EnumSet.of(Replica.Type.TLOG, Replica.Type.NRT)).size()) {
-          log.info("Enough replicas found to continue.");
-          return true;
-        } else {
-          if (cnt % 40 == 0) {
-            if (log.isInfoEnabled()) {
-              log.info("Waiting until we see more replicas up for shard {}: total={} found={} timeoute in={}ms"
-                  , shardId, slices.getReplicas(EnumSet.of(Replica.Type.TLOG, Replica.Type.NRT)).size(), found,
-                  TimeUnit.MILLISECONDS.convert(timeoutAt - System.nanoTime(), TimeUnit.NANOSECONDS));
-            }
-          }
-        }
-        
-        if (System.nanoTime() > timeoutAt) {
-          log.info("Was waiting for replicas to come up, but they are taking too long - assuming they won't come back till later");
-          return false;
-        }
-      } else {
-        log.warn("Shard not found: {} for collection {}", shardId, collection);
-
-        return false;
-
-      }
-      
-      Thread.sleep(500);
-      docCollection = zkController.getClusterState().getCollectionOrNull(collection);
-      slices = (docCollection == null) ? null : docCollection.getSlice(shardId);
-      cnt++;
-    }
-    return false;
-  }
-  
-  // returns true if all replicas are found to be up, false if not
-  private boolean areAllReplicasParticipating() throws InterruptedException {
-    final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE;
-    final DocCollection docCollection = zkController.getClusterState().getCollectionOrNull(collection);
-    
-    if (docCollection != null && docCollection.getSlice(shardId) != null) {
-      final Slice slices = docCollection.getSlice(shardId);
-      int found = 0;
-      try {
-        found = zkClient.getChildren(shardsElectZkPath, null, true).size();
-      } catch (KeeperException e) {
-        if (e instanceof KeeperException.SessionExpiredException) {
-          // if the session has expired, then another election will be launched, so
-          // quit here
-          throw new SolrException(ErrorCode.SERVER_ERROR,
-              "ZK session expired - cancelling election for " + collection + " " + shardId);
-        }
-        SolrException.log(log, "Error checking for the number of election participants", e);
-      }
-      
-      if (found >= slices.getReplicasMap().size()) {
-        log.debug("All replicas are ready to participate in election.");
-        return true;
-      }
-    } else {
-      log.warn("Shard not found: {} for collection {}", shardId, collection);
-      return false;
-    }
-    return false;
-  }
-
-  private void rejoinLeaderElection(SolrCore core)
-      throws InterruptedException, KeeperException, IOException {
-    // remove our ephemeral and re join the election
-    if (cc.isShutDown()) {
-      log.debug("Not rejoining election because CoreContainer is closed");
-      return;
-    }
-    
-    log.info("There may be a better leader candidate than us - going back into recovery");
-    
-    cancelElection();
-    
-    core.getUpdateHandler().getSolrCoreState().doRecovery(cc, core.getCoreDescriptor());
-    
-    leaderElector.joinElection(this, true);
-  }
-
-}
-
-final class OverseerElectionContext extends ElectionContext {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private final SolrZkClient zkClient;
-  private final Overseer overseer;
-  private volatile boolean isClosed = false;
-
-  public OverseerElectionContext(SolrZkClient zkClient, Overseer overseer, final String zkNodeName) {
-    super(zkNodeName, Overseer.OVERSEER_ELECT, Overseer.OVERSEER_ELECT + "/leader", null, zkClient);
-    this.overseer = overseer;
-    this.zkClient = zkClient;
-    try {
-      new ZkCmdExecutor(zkClient.getZkClientTimeout()).ensureExists(Overseer.OVERSEER_ELECT, zkClient);
-    } catch (KeeperException e) {
-      throw new SolrException(ErrorCode.SERVER_ERROR, e);
-    } catch (InterruptedException e) {
-      Thread.currentThread().interrupt();
-      throw new SolrException(ErrorCode.SERVER_ERROR, e);
-    }
-  }
-
-  @Override
-  void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs) throws KeeperException,
-      InterruptedException {
-    if (isClosed) {
-      return;
-    }
-    log.info("I am going to be the leader {}", id);
-    final String id = leaderSeqPath
-        .substring(leaderSeqPath.lastIndexOf("/") + 1);
-    ZkNodeProps myProps = new ZkNodeProps(ID, id);
-
-    zkClient.makePath(leaderPath, Utils.toJSON(myProps),
-        CreateMode.EPHEMERAL, true);
-    if(pauseBeforeStartMs >0){
-      try {
-        Thread.sleep(pauseBeforeStartMs);
-      } catch (InterruptedException e) {
-        Thread.interrupted();
-        log.warn("Wait interrupted ", e);
-      }
-    }
-    synchronized (this) {
-      if (!this.isClosed && !overseer.getZkController().getCoreContainer().isShutDown()) {
-        overseer.start(id);
-      }
-    }
-  }
-  
-  @Override
-  public void cancelElection() throws InterruptedException, KeeperException {
-    super.cancelElection();
-    overseer.close();
-  }
-  
-  @Override
-  public synchronized void close() {
-    this.isClosed  = true;
-    overseer.close();
-  }
-
-  @Override
-  public ElectionContext copy() {
-    return new OverseerElectionContext(zkClient, overseer ,id);
-  }
-  
-  @Override
-  public void joinedElectionFired() {
-    overseer.close();
-  }
-  
-  @Override
-  public void checkIfIamLeaderFired() {
-    // leader changed - close the overseer
-    overseer.close();
-  }
 
-}
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/OverseerElectionContext.java
new file mode 100644
index 0000000..e25befa
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerElectionContext.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkCmdExecutor;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.util.Utils;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.params.CommonParams.ID;
+
+final class OverseerElectionContext extends ElectionContext {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private final SolrZkClient zkClient;
+  private final Overseer overseer;
+  private volatile boolean isClosed = false;
+
+  public OverseerElectionContext(SolrZkClient zkClient, Overseer overseer, final String zkNodeName) {
+    super(zkNodeName, Overseer.OVERSEER_ELECT, Overseer.OVERSEER_ELECT + "/leader", null, zkClient);
+    this.overseer = overseer;
+    this.zkClient = zkClient;
+    try {
+      new ZkCmdExecutor(zkClient.getZkClientTimeout()).ensureExists(Overseer.OVERSEER_ELECT, zkClient);
+    } catch (KeeperException e) {
+      throw new SolrException(ErrorCode.SERVER_ERROR, e);
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      throw new SolrException(ErrorCode.SERVER_ERROR, e);
+    }
+  }
+
+  @Override
+  void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs) throws KeeperException,
+      InterruptedException {
+    if (isClosed) {
+      return;
+    }
+    log.info("I am going to be the leader {}", id);
+    final String id = leaderSeqPath
+        .substring(leaderSeqPath.lastIndexOf("/") + 1);
+    ZkNodeProps myProps = new ZkNodeProps(ID, id);
+
+    zkClient.makePath(leaderPath, Utils.toJSON(myProps),
+        CreateMode.EPHEMERAL, true);
+    if (pauseBeforeStartMs > 0) {
+      try {
+        Thread.sleep(pauseBeforeStartMs);
+      } catch (InterruptedException e) {
+        Thread.interrupted();
+        log.warn("Wait interrupted ", e);
+      }
+    }
+    synchronized (this) {
+      if (!this.isClosed && !overseer.getZkController().getCoreContainer().isShutDown()) {
+        overseer.start(id);
+      }
+    }
+  }
+
+  @Override
+  public void cancelElection() throws InterruptedException, KeeperException {
+    super.cancelElection();
+    overseer.close();
+  }
+
+  @Override
+  public synchronized void close() {
+    this.isClosed = true;
+    overseer.close();
+  }
+
+  @Override
+  public ElectionContext copy() {
+    return new OverseerElectionContext(zkClient, overseer, id);
+  }
+
+  @Override
+  public void joinedElectionFired() {
+    overseer.close();
+  }
+
+  @Override
+  public void checkIfIamLeaderFired() {
+    // leader changed - close the overseer
+    overseer.close();
+  }
+
+}
diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContext.java
similarity index 58%
copy from solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
copy to solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContext.java
index 9ba4900..f6c96ca 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContext.java
@@ -16,16 +16,12 @@
  */
 package org.apache.solr.cloud;
 
-import java.io.Closeable;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
 import java.util.EnumSet;
-import java.util.List;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.hadoop.fs.Path;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.solr.cloud.overseer.OverseerAction;
 import org.apache.solr.common.SolrException;
@@ -34,12 +30,9 @@ import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.ZkCmdExecutor;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.util.RetryUtil;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
@@ -48,246 +41,36 @@ import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.update.PeerSync;
 import org.apache.solr.update.UpdateLog;
 import org.apache.solr.util.RefCounted;
-import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.KeeperException.NoNodeException;
-import org.apache.zookeeper.KeeperException.NodeExistsException;
 import org.apache.zookeeper.KeeperException.SessionExpiredException;
-import org.apache.zookeeper.Op;
-import org.apache.zookeeper.OpResult;
-import org.apache.zookeeper.OpResult.SetDataResult;
-import org.apache.zookeeper.ZooDefs;
-import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.common.params.CommonParams.ID;
-
-public abstract class ElectionContext implements Closeable {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  final String electionPath;
-  final ZkNodeProps leaderProps;
-  final String id;
-  final String leaderPath;
-  volatile String leaderSeqPath;
-  private SolrZkClient zkClient;
-
-  public ElectionContext(final String coreNodeName,
-      final String electionPath, final String leaderPath, final ZkNodeProps leaderProps, final SolrZkClient zkClient) {
-    assert zkClient != null;
-    this.id = coreNodeName;
-    this.electionPath = electionPath;
-    this.leaderPath = leaderPath;
-    this.leaderProps = leaderProps;
-    this.zkClient = zkClient;
-  }
-  
-  public void close() {
-
-  }
-  
-  public void cancelElection() throws InterruptedException, KeeperException {
-    if (leaderSeqPath != null) {
-      try {
-        log.debug("Canceling election {}", leaderSeqPath);
-        zkClient.delete(leaderSeqPath, -1, true);
-      } catch (NoNodeException e) {
-        // fine
-        log.debug("cancelElection did not find election node to remove {}", leaderSeqPath);
-      }
-    } else {
-      log.debug("cancelElection skipped as this context has not been initialized");
-    }
-  }
-
-  abstract void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs) throws KeeperException, InterruptedException, IOException;
-
-  public void checkIfIamLeaderFired() {}
-
-  public void joinedElectionFired() {}
-
-  public  ElectionContext copy(){
-    throw new UnsupportedOperationException("copy");
-  }
-}
-
-class ShardLeaderElectionContextBase extends ElectionContext {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  protected final SolrZkClient zkClient;
-  protected String shardId;
-  protected String collection;
-  protected LeaderElector leaderElector;
-  protected ZkStateReader zkStateReader;
-  protected ZkController zkController;
-  private Integer leaderZkNodeParentVersion;
-
-  // Prevents a race between cancelling and becoming leader.
-  private final Object lock = new Object();
-
-  public ShardLeaderElectionContextBase(LeaderElector leaderElector,
-      final String shardId, final String collection, final String coreNodeName,
-      ZkNodeProps props, ZkController zkController) {
-    super(coreNodeName, ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection
-        + "/leader_elect/" + shardId, ZkStateReader.getShardLeadersPath(
-        collection, shardId), props, zkController.getZkClient());
-    this.leaderElector = leaderElector;
-    this.zkStateReader = zkController.getZkStateReader();
-    this.zkClient = zkStateReader.getZkClient();
-    this.zkController = zkController;
-    this.shardId = shardId;
-    this.collection = collection;
-    
-    String parent = new Path(leaderPath).getParent().toString();
-    ZkCmdExecutor zcmd = new ZkCmdExecutor(30000);
-    // only if /collections/{collection} exists already do we succeed in creating this path
-    log.info("make sure parent is created {}", parent);
-    try {
-      zcmd.ensureExists(parent, (byte[])null, CreateMode.PERSISTENT, zkClient, 2);
-    } catch (KeeperException e) {
-      throw new RuntimeException(e);
-    } catch (InterruptedException e) {
-      Thread.currentThread().interrupt();
-      throw new RuntimeException(e);
-    }
-  }
-  
-  @Override
-  public void cancelElection() throws InterruptedException, KeeperException {
-    super.cancelElection();
-    synchronized (lock) {
-      if (leaderZkNodeParentVersion != null) {
-        try {
-          // We need to be careful and make sure we *only* delete our own leader registration node.
-          // We do this by using a multi and ensuring the parent znode of the leader registration node
-          // matches the version we expect - there is a setData call that increments the parent's znode
-          // version whenever a leader registers.
-          log.debug("Removing leader registration node on cancel: {} {}", leaderPath, leaderZkNodeParentVersion);
-          List<Op> ops = new ArrayList<>(2);
-          ops.add(Op.check(new Path(leaderPath).getParent().toString(), leaderZkNodeParentVersion));
-          ops.add(Op.delete(leaderPath, -1));
-          zkClient.multi(ops, true);
-        } catch (KeeperException.NoNodeException nne) {
-          // no problem
-          log.debug("No leader registration node found to remove: {}", leaderPath);
-        } catch (KeeperException.BadVersionException bve) {
-          log.info("Cannot remove leader registration node because the current registered node is not ours: {}", leaderPath);
-          // no problem
-        } catch (InterruptedException e) {
-          throw e;
-        } catch (Exception e) {
-          SolrException.log(log, e);
-        }
-        leaderZkNodeParentVersion = null;
-      } else {
-        log.info("No version found for ephemeral leader parent node, won't remove previous leader registration.");
-      }
-    }
-  }
-  
-  @Override
-  void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs)
-      throws KeeperException, InterruptedException, IOException {
-    // register as leader - if an ephemeral is already there, wait to see if it goes away
-
-    String parent = new Path(leaderPath).getParent().toString();
-    try {
-      RetryUtil.retryOnThrowable(NodeExistsException.class, 60000, 5000, () -> {
-        synchronized (lock) {
-          log.info("Creating leader registration node {} after winning as {}", leaderPath, leaderSeqPath);
-          List<Op> ops = new ArrayList<>(2);
-
-          // We use a multi operation to get the parent nodes version, which will
-          // be used to make sure we only remove our own leader registration node.
-          // The setData call used to get the parent version is also the trigger to
-          // increment the version. We also do a sanity check that our leaderSeqPath exists.
-
-          ops.add(Op.check(leaderSeqPath, -1));
-          ops.add(Op.create(leaderPath, Utils.toJSON(leaderProps), zkClient.getZkACLProvider().getACLsToAdd(leaderPath), CreateMode.EPHEMERAL));
-          ops.add(Op.setData(parent, null, -1));
-          List<OpResult> results;
-
-          results = zkClient.multi(ops, true);
-          for (OpResult result : results) {
-            if (result.getType() == ZooDefs.OpCode.setData) {
-              SetDataResult dresult = (SetDataResult) result;
-              Stat stat = dresult.getStat();
-              leaderZkNodeParentVersion = stat.getVersion();
-              return;
-            }
-          }
-          assert leaderZkNodeParentVersion != null;
-        }
-      });
-    } catch (NoNodeException e) {
-      log.info("Will not register as leader because it seems the election is no longer taking place.");
-      return;
-    } catch (Throwable t) {
-      if (t instanceof OutOfMemoryError) {
-        throw (OutOfMemoryError) t;
-      }
-      throw new SolrException(ErrorCode.SERVER_ERROR, "Could not register as the leader because creating the ephemeral registration node in ZooKeeper failed", t);
-    } 
-    
-    assert shardId != null;
-    boolean isAlreadyLeader = false;
-    if (zkStateReader.getClusterState() != null &&
-        zkStateReader.getClusterState().getCollection(collection).getSlice(shardId).getReplicas().size() < 2) {
-      Replica leader = zkStateReader.getLeader(collection, shardId);
-      if (leader != null
-          && leader.getBaseUrl().equals(leaderProps.get(ZkStateReader.BASE_URL_PROP))
-          && leader.getCoreName().equals(leaderProps.get(ZkStateReader.CORE_NAME_PROP))) {
-        isAlreadyLeader = true;
-      }
-    }
-    if (!isAlreadyLeader) {
-      ZkNodeProps m = ZkNodeProps.fromKeyVals(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(),
-          ZkStateReader.SHARD_ID_PROP, shardId,
-          ZkStateReader.COLLECTION_PROP, collection,
-          ZkStateReader.BASE_URL_PROP, leaderProps.get(ZkStateReader.BASE_URL_PROP),
-          ZkStateReader.CORE_NAME_PROP, leaderProps.get(ZkStateReader.CORE_NAME_PROP),
-          ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString());
-      assert zkController != null;
-      assert zkController.getOverseer() != null;
-      zkController.getOverseer().offerStateUpdate(Utils.toJSON(m));
-    }
-  }
-
-  public LeaderElector getLeaderElector() {
-    return leaderElector;
-  }
-
-  Integer getLeaderZkNodeParentVersion() {
-    synchronized (lock) {
-      return leaderZkNodeParentVersion;
-    }
-  }
-}
-
 // add core container and stop passing core around...
 final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  
+
   private final CoreContainer cc;
   private final SyncStrategy syncStrategy;
 
   private volatile boolean isClosed = false;
-  
-  public ShardLeaderElectionContext(LeaderElector leaderElector, 
-      final String shardId, final String collection,
-      final String coreNodeName, ZkNodeProps props, ZkController zkController, CoreContainer cc) {
+
+  public ShardLeaderElectionContext(LeaderElector leaderElector,
+                                    final String shardId, final String collection,
+                                    final String coreNodeName, ZkNodeProps props, ZkController zkController, CoreContainer cc) {
     super(leaderElector, shardId, collection, coreNodeName, props,
         zkController);
     this.cc = cc;
     syncStrategy = new SyncStrategy(cc);
   }
-  
+
   @Override
   public void close() {
     super.close();
-    this.isClosed  = true;
+    this.isClosed = true;
     syncStrategy.close();
   }
-  
+
   @Override
   public void cancelElection() throws InterruptedException, KeeperException {
     String coreName = leaderProps.getStr(ZkStateReader.CORE_NAME_PROP);
@@ -296,25 +79,25 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
         core.getCoreDescriptor().getCloudDescriptor().setLeader(false);
       }
     }
-    
+
     super.cancelElection();
   }
-  
+
   @Override
   public ElectionContext copy() {
     return new ShardLeaderElectionContext(leaderElector, shardId, collection, id, leaderProps, zkController, cc);
   }
-  
-  /* 
+
+  /*
    * weAreReplacement: has someone else been the leader already?
    */
   @Override
   void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStart) throws KeeperException,
- InterruptedException, IOException {
+      InterruptedException, IOException {
     String coreName = leaderProps.getStr(ZkStateReader.CORE_NAME_PROP);
     ActionThrottle lt;
     try (SolrCore core = cc.getCore(coreName)) {
-      if (core == null ) {
+      if (core == null) {
         // shutdown or removed
         return;
       }
@@ -325,10 +108,10 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
     try {
       lt.minimumWaitBetweenActions();
       lt.markAttemptingAction();
-      
-      
+
+
       int leaderVoteWait = cc.getZkController().getLeaderVoteWait();
-      
+
       log.debug("Running the leader process for shard={} and weAreReplacement={} and leaderVoteWait={}", shardId, weAreReplacement, leaderVoteWait);
       if (zkController.getClusterState().getCollection(collection).getSlice(shardId).getReplicas().size() > 1) {
         // Clear the leader in clusterstate. We only need to worry about this if there is actually more than one replica.
@@ -343,23 +126,23 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
       } else {
         allReplicasInLine = areAllReplicasParticipating();
       }
-      
+
       if (isClosed) {
-        // Solr is shutting down or the ZooKeeper session expired while waiting for replicas. If the later, 
-        // we cannot be sure we are still the leader, so we should bail out. The OnReconnect handler will 
+        // Solr is shutting down or the ZooKeeper session expired while waiting for replicas. If the later,
+        // we cannot be sure we are still the leader, so we should bail out. The OnReconnect handler will
         // re-register the cores and handle a new leadership election.
         return;
       }
-      
+
       Replica.Type replicaType;
       String coreNodeName;
       boolean setTermToMax = false;
       try (SolrCore core = cc.getCore(coreName)) {
-        
+
         if (core == null) {
           return;
         }
-        
+
         replicaType = core.getCoreDescriptor().getCloudDescriptor().getReplicaType();
         coreNodeName = core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName();
         // should I be leader?
@@ -377,20 +160,20 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
         if (isClosed) {
           return;
         }
-        
+
         log.info("I may be the new leader - try and sync");
-        
+
         // we are going to attempt to be the leader
         // first cancel any current recovery
         core.getUpdateHandler().getSolrCoreState().cancelRecovery();
-        
+
         if (weAreReplacement) {
           // wait a moment for any floating updates to finish
           try {
             Thread.sleep(2500);
           } catch (InterruptedException e) {
             Thread.currentThread().interrupt();
-            throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, e);
+            throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, e);
           }
         }
 
@@ -403,9 +186,9 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
           SolrException.log(log, "Exception while trying to sync", e);
           result = PeerSync.PeerSyncResult.failure();
         }
-        
+
         UpdateLog ulog = core.getUpdateHandler().getUpdateLog();
-        
+
         if (!success) {
           boolean hasRecentUpdates = false;
           if (ulog != null) {
@@ -414,22 +197,22 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
               hasRecentUpdates = !recentUpdates.getVersions(1).isEmpty();
             }
           }
-          
+
           if (!hasRecentUpdates) {
             // we failed sync, but we have no versions - we can't sync in that case
             // - we were active
             // before, so become leader anyway if no one else has any versions either
-            if (result.getOtherHasVersions().orElse(false))  {
+            if (result.getOtherHasVersions().orElse(false)) {
               log.info("We failed sync, but we have no versions - we can't sync in that case. But others have some versions, so we should not become leader");
               success = false;
-            } else  {
+            } else {
               log.info(
                   "We failed sync, but we have no versions - we can't sync in that case - we were active before, so become leader anyway");
               success = true;
             }
           }
         }
-        
+
         // solrcloud_debug
         if (log.isDebugEnabled()) {
           try {
@@ -451,9 +234,9 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
           rejoinLeaderElection(core);
           return;
         }
-        
+
       }
-      
+
       boolean isLeader = true;
       if (!isClosed) {
         try {
@@ -490,7 +273,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
           if (log.isInfoEnabled()) {
             log.info("I am the new leader: {} {}", ZkCoreNodeProps.getCoreUrl(leaderProps), shardId);
           }
-          
+
           // we made it as leader - send any recovery requests we need to
           syncStrategy.requestRecoveries();
 
@@ -500,18 +283,18 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
         } catch (Exception e) {
           isLeader = false;
           SolrException.log(log, "There was a problem trying to register as the leader", e);
-          
+
           try (SolrCore core = cc.getCore(coreName)) {
-            
+
             if (core == null) {
               if (log.isDebugEnabled()) {
                 log.debug("SolrCore not found: {} in {}", coreName, cc.getLoadedCoreNames());
               }
               return;
             }
-            
+
             core.getCoreDescriptor().getCloudDescriptor().setLeader(false);
-            
+
             // we could not publish ourselves as leader - try and rejoin election
             try {
               rejoinLeaderElection(core);
@@ -531,6 +314,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
 
   /**
   * Wait for other replicas with higher terms to participate in the election
+   *
   * @return true if after {@code timeout} there are no other replicas with a higher term participating in the election,
   * false otherwise
    */
@@ -553,6 +337,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
 
   /**
   * Did other replicas with a higher term participate in the election
+   *
   * @return true if other replicas with a higher term participated in the election, false otherwise
    */
   private boolean replicasWithHigherTermParticipated(ZkShardTerms zkShardTerms, String coreNodeName) {
@@ -579,19 +364,19 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
   }
 
   public void publishActiveIfRegisteredAndNotActive(SolrCore core) throws Exception {
-      if (core.getCoreDescriptor().getCloudDescriptor().hasRegistered()) {
-        ZkStateReader zkStateReader = zkController.getZkStateReader();
-        zkStateReader.forceUpdateCollection(collection);
-        ClusterState clusterState = zkStateReader.getClusterState();
-        Replica rep = getReplica(clusterState, collection, leaderProps.getStr(ZkStateReader.CORE_NODE_NAME_PROP));
-        if (rep == null) return;
-        if (rep.getState() != Replica.State.ACTIVE || core.getCoreDescriptor().getCloudDescriptor().getLastPublished() != Replica.State.ACTIVE) {
-          log.debug("We have become the leader after core registration but are not in an ACTIVE state - publishing ACTIVE");
-          zkController.publish(core.getCoreDescriptor(), Replica.State.ACTIVE);
-        }
+    if (core.getCoreDescriptor().getCloudDescriptor().hasRegistered()) {
+      ZkStateReader zkStateReader = zkController.getZkStateReader();
+      zkStateReader.forceUpdateCollection(collection);
+      ClusterState clusterState = zkStateReader.getClusterState();
+      Replica rep = getReplica(clusterState, collection, leaderProps.getStr(ZkStateReader.CORE_NODE_NAME_PROP));
+      if (rep == null) return;
+      if (rep.getState() != Replica.State.ACTIVE || core.getCoreDescriptor().getCloudDescriptor().getLastPublished() != Replica.State.ACTIVE) {
+        log.debug("We have become the leader after core registration but are not in an ACTIVE state - publishing ACTIVE");
+        zkController.publish(core.getCoreDescriptor(), Replica.State.ACTIVE);
       }
+    }
   }
-  
+
   private Replica getReplica(ClusterState clusterState, String collectionName, String replicaName) {
     if (clusterState == null) return null;
     final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName);
@@ -603,7 +388,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
   private boolean waitForReplicasToComeUp(int timeoutms) throws InterruptedException {
     long timeoutAt = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeoutms, TimeUnit.MILLISECONDS);
     final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE;
-    
+
     DocCollection docCollection = zkController.getClusterState().getCollectionOrNull(collection);
     Slice slices = (docCollection == null) ? null : docCollection.getSlice(shardId);
     int cnt = 0;
@@ -618,12 +403,12 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
             // if the session has expired, then another election will be launched, so
             // quit here
             throw new SolrException(ErrorCode.SERVER_ERROR,
-                                    "ZK session expired - cancelling election for " + collection + " " + shardId);
+                "ZK session expired - cancelling election for " + collection + " " + shardId);
           }
           SolrException.log(log,
               "Error checking for the number of election participants", e);
         }
-        
+
         // on startup and after connection timeout, wait for all known shards
         if (found >= slices.getReplicas(EnumSet.of(Replica.Type.TLOG, Replica.Type.NRT)).size()) {
           log.info("Enough replicas found to continue.");
@@ -637,7 +422,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
             }
           }
         }
-        
+
         if (System.nanoTime() > timeoutAt) {
           log.info("Was waiting for replicas to come up, but they are taking too long - assuming they won't come back till later");
           return false;
@@ -648,7 +433,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
         return false;
 
       }
-      
+
       Thread.sleep(500);
       docCollection = zkController.getClusterState().getCollectionOrNull(collection);
       slices = (docCollection == null) ? null : docCollection.getSlice(shardId);
@@ -656,12 +441,12 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
     }
     return false;
   }
-  
+
   // returns true if all replicas are found to be up, false if not
   private boolean areAllReplicasParticipating() throws InterruptedException {
     final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE;
     final DocCollection docCollection = zkController.getClusterState().getCollectionOrNull(collection);
-    
+
     if (docCollection != null && docCollection.getSlice(shardId) != null) {
       final Slice slices = docCollection.getSlice(shardId);
       int found = 0;
@@ -676,7 +461,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
         }
         SolrException.log(log, "Error checking for the number of election participants", e);
       }
-      
+
       if (found >= slices.getReplicasMap().size()) {
         log.debug("All replicas are ready to participate in election.");
         return true;
@@ -695,92 +480,14 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase {
       log.debug("Not rejoining election because CoreContainer is closed");
       return;
     }
-    
-    log.info("There may be a better leader candidate than us - going back into recovery");
-    
-    cancelElection();
-    
-    core.getUpdateHandler().getSolrCoreState().doRecovery(cc, core.getCoreDescriptor());
-    
-    leaderElector.joinElection(this, true);
-  }
 
-}
-
-final class OverseerElectionContext extends ElectionContext {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private final SolrZkClient zkClient;
-  private final Overseer overseer;
-  private volatile boolean isClosed = false;
+    log.info("There may be a better leader candidate than us - going back into recovery");
 
-  public OverseerElectionContext(SolrZkClient zkClient, Overseer overseer, final String zkNodeName) {
-    super(zkNodeName, Overseer.OVERSEER_ELECT, Overseer.OVERSEER_ELECT + "/leader", null, zkClient);
-    this.overseer = overseer;
-    this.zkClient = zkClient;
-    try {
-      new ZkCmdExecutor(zkClient.getZkClientTimeout()).ensureExists(Overseer.OVERSEER_ELECT, zkClient);
-    } catch (KeeperException e) {
-      throw new SolrException(ErrorCode.SERVER_ERROR, e);
-    } catch (InterruptedException e) {
-      Thread.currentThread().interrupt();
-      throw new SolrException(ErrorCode.SERVER_ERROR, e);
-    }
-  }
+    cancelElection();
 
-  @Override
-  void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs) throws KeeperException,
-      InterruptedException {
-    if (isClosed) {
-      return;
-    }
-    log.info("I am going to be the leader {}", id);
-    final String id = leaderSeqPath
-        .substring(leaderSeqPath.lastIndexOf("/") + 1);
-    ZkNodeProps myProps = new ZkNodeProps(ID, id);
-
-    zkClient.makePath(leaderPath, Utils.toJSON(myProps),
-        CreateMode.EPHEMERAL, true);
-    if(pauseBeforeStartMs >0){
-      try {
-        Thread.sleep(pauseBeforeStartMs);
-      } catch (InterruptedException e) {
-        Thread.interrupted();
-        log.warn("Wait interrupted ", e);
-      }
-    }
-    synchronized (this) {
-      if (!this.isClosed && !overseer.getZkController().getCoreContainer().isShutDown()) {
-        overseer.start(id);
-      }
-    }
-  }
-  
-  @Override
-  public void cancelElection() throws InterruptedException, KeeperException {
-    super.cancelElection();
-    overseer.close();
-  }
-  
-  @Override
-  public synchronized void close() {
-    this.isClosed  = true;
-    overseer.close();
-  }
+    core.getUpdateHandler().getSolrCoreState().doRecovery(cc, core.getCoreDescriptor());
 
-  @Override
-  public ElectionContext copy() {
-    return new OverseerElectionContext(zkClient, overseer ,id);
-  }
-  
-  @Override
-  public void joinedElectionFired() {
-    overseer.close();
-  }
-  
-  @Override
-  public void checkIfIamLeaderFired() {
-    // leader changed - close the overseer
-    overseer.close();
+    leaderElector.joinElection(this, true);
   }
 
 }
diff --git a/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContextBase.java b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContextBase.java
new file mode 100644
index 0000000..a9afc8d
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cloud/ShardLeaderElectionContextBase.java
@@ -0,0 +1,194 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cloud;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.List;
+import java.util.ArrayList;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.solr.cloud.overseer.OverseerAction;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.cloud.ZkCmdExecutor;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.RetryUtil;
+import org.apache.solr.common.util.Utils;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.KeeperException.NoNodeException;
+import org.apache.zookeeper.KeeperException.NodeExistsException;
+import org.apache.zookeeper.Op;
+import org.apache.zookeeper.OpResult;
+import org.apache.zookeeper.OpResult.SetDataResult;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+class ShardLeaderElectionContextBase extends ElectionContext {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  protected final SolrZkClient zkClient;
+  protected String shardId;
+  protected String collection;
+  protected LeaderElector leaderElector;
+  protected ZkStateReader zkStateReader;
+  protected ZkController zkController;
+  private Integer leaderZkNodeParentVersion;
+
+  // Prevents a race between cancelling and becoming leader.
+  private final Object lock = new Object();
+
+  public ShardLeaderElectionContextBase(LeaderElector leaderElector,
+                                        final String shardId, final String collection, final String coreNodeName,
+                                        ZkNodeProps props, ZkController zkController) {
+    super(coreNodeName, ZkStateReader.COLLECTIONS_ZKNODE + "/" + collection
+        + "/leader_elect/" + shardId, ZkStateReader.getShardLeadersPath(
+        collection, shardId), props, zkController.getZkClient());
+    this.leaderElector = leaderElector;
+    this.zkStateReader = zkController.getZkStateReader();
+    this.zkClient = zkStateReader.getZkClient();
+    this.zkController = zkController;
+    this.shardId = shardId;
+    this.collection = collection;
+
+    String parent = new Path(leaderPath).getParent().toString();
+    ZkCmdExecutor zcmd = new ZkCmdExecutor(30000);
+    // only if /collections/{collection} exists already do we succeed in creating this path
+    log.info("make sure parent is created {}", parent);
+    try {
+      zcmd.ensureExists(parent, (byte[]) null, CreateMode.PERSISTENT, zkClient, 2);
+    } catch (KeeperException e) {
+      throw new RuntimeException(e);
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Override
+  public void cancelElection() throws InterruptedException, KeeperException {
+    super.cancelElection();
+    synchronized (lock) {
+      if (leaderZkNodeParentVersion != null) {
+        // no problem
+        try {
+          // We need to be careful and make sure we *only* delete our own leader registration node.
+          // We do this by using a multi and ensuring the parent znode of the leader registration node
+          // matches the version we expect - there is a setData call that increments the parent's znode
+          // version whenever a leader registers.
+          log.debug("Removing leader registration node on cancel: {} {}", leaderPath, leaderZkNodeParentVersion);
+          List<Op> ops = new ArrayList<>(2);
+          ops.add(Op.check(new Path(leaderPath).getParent().toString(), leaderZkNodeParentVersion));
+          ops.add(Op.delete(leaderPath, -1));
+          zkClient.multi(ops, true);
+        } catch (InterruptedException e) {
+          throw e;
+        } catch (IllegalArgumentException e) {
+          SolrException.log(log, e);
+        }
+        leaderZkNodeParentVersion = null;
+      } else {
+        log.info("No version found for ephemeral leader parent node, won't remove previous leader registration.");
+      }
+    }
+  }
+
+  @Override
+  void runLeaderProcess(boolean weAreReplacement, int pauseBeforeStartMs)
+      throws KeeperException, InterruptedException, IOException {
+    // register as leader - if an ephemeral is already there, wait to see if it goes away
+
+    String parent = new Path(leaderPath).getParent().toString();
+    try {
+      RetryUtil.retryOnThrowable(NodeExistsException.class, 60000, 5000, () -> {
+        synchronized (lock) {
+          log.info("Creating leader registration node {} after winning as {}", leaderPath, leaderSeqPath);
+          List<Op> ops = new ArrayList<>(2);
+
+          // We use a multi operation to get the parent nodes version, which will
+          // be used to make sure we only remove our own leader registration node.
+          // The setData call used to get the parent version is also the trigger to
+          // increment the version. We also do a sanity check that our leaderSeqPath exists.
+
+          ops.add(Op.check(leaderSeqPath, -1));
+          ops.add(Op.create(leaderPath, Utils.toJSON(leaderProps), zkClient.getZkACLProvider().getACLsToAdd(leaderPath), CreateMode.EPHEMERAL));
+          ops.add(Op.setData(parent, null, -1));
+          List<OpResult> results;
+
+          results = zkClient.multi(ops, true);
+          for (OpResult result : results) {
+            if (result.getType() == ZooDefs.OpCode.setData) {
+              SetDataResult dresult = (SetDataResult) result;
+              Stat stat = dresult.getStat();
+              leaderZkNodeParentVersion = stat.getVersion();
+              return;
+            }
+          }
+          assert leaderZkNodeParentVersion != null;
+        }
+      });
+    } catch (NoNodeException e) {
+      log.info("Will not register as leader because it seems the election is no longer taking place.");
+      return;
+    } catch (Throwable t) {
+      if (t instanceof OutOfMemoryError) {
+        throw (OutOfMemoryError) t;
+      }
+      throw new SolrException(ErrorCode.SERVER_ERROR, "Could not register as the leader because creating the ephemeral registration node in ZooKeeper failed", t);
+    }
+
+    assert shardId != null;
+    boolean isAlreadyLeader = false;
+    if (zkStateReader.getClusterState() != null &&
+        zkStateReader.getClusterState().getCollection(collection).getSlice(shardId).getReplicas().size() < 2) {
+      Replica leader = zkStateReader.getLeader(collection, shardId);
+      if (leader != null
+          && leader.getBaseUrl().equals(leaderProps.get(ZkStateReader.BASE_URL_PROP))
+          && leader.getCoreName().equals(leaderProps.get(ZkStateReader.CORE_NAME_PROP))) {
+        isAlreadyLeader = true;
+      }
+    }
+    if (!isAlreadyLeader) {
+      ZkNodeProps m = ZkNodeProps.fromKeyVals(Overseer.QUEUE_OPERATION, OverseerAction.LEADER.toLower(),
+          ZkStateReader.SHARD_ID_PROP, shardId,
+          ZkStateReader.COLLECTION_PROP, collection,
+          ZkStateReader.BASE_URL_PROP, leaderProps.get(ZkStateReader.BASE_URL_PROP),
+          ZkStateReader.CORE_NAME_PROP, leaderProps.get(ZkStateReader.CORE_NAME_PROP),
+          ZkStateReader.STATE_PROP, Replica.State.ACTIVE.toString());
+      assert zkController != null;
+      assert zkController.getOverseer() != null;
+      zkController.getOverseer().offerStateUpdate(Utils.toJSON(m));
+    }
+  }
+
+  public LeaderElector getLeaderElector() {
+    return leaderElector;
+  }
+
+  Integer getLeaderZkNodeParentVersion() {
+    synchronized (lock) {
+      return leaderZkNodeParentVersion;
+    }
+  }
+}
\ No newline at end of file
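
The new ShardLeaderElectionContextBase registers and cancels leadership with single ZooKeeper multi operations, as the comments above describe: the registration multi checks that our election sequence node still exists, creates the ephemeral leader node, and does a setData on the parent to bump its version; the cancel multi then deletes the leader node only if the parent is still at the recorded version. A minimal standalone sketch of that pattern using the raw ZooKeeper client API rather than Solr's SolrZkClient (class and method names here are illustrative only, not part of the patch):

  import java.util.ArrayList;
  import java.util.List;

  import org.apache.zookeeper.CreateMode;
  import org.apache.zookeeper.KeeperException;
  import org.apache.zookeeper.Op;
  import org.apache.zookeeper.OpResult;
  import org.apache.zookeeper.OpResult.SetDataResult;
  import org.apache.zookeeper.ZooDefs;
  import org.apache.zookeeper.ZooKeeper;

  // Illustrative sketch of the leader-registration multi-op pattern; not Solr's API.
  class LeaderRegistrationSketch {
    private final ZooKeeper zk;
    private Integer parentVersion; // parent znode version captured when we registered

    LeaderRegistrationSketch(ZooKeeper zk) {
      this.zk = zk;
    }

    /** Registers an ephemeral leader node atomically. */
    void registerLeader(String electionSeqPath, String leaderPath, String parentPath, byte[] props)
        throws KeeperException, InterruptedException {
      List<Op> ops = new ArrayList<>(3);
      ops.add(Op.check(electionSeqPath, -1)); // version -1: just assert our election node still exists
      ops.add(Op.create(leaderPath, props, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL));
      ops.add(Op.setData(parentPath, null, -1)); // bumps the parent's version as a side effect
      for (OpResult result : zk.multi(ops)) {
        if (result instanceof SetDataResult) {
          parentVersion = ((SetDataResult) result).getStat().getVersion();
        }
      }
    }

    /** Deletes the leader node only if the parent is still at the version we recorded. */
    void cancelLeader(String leaderPath, String parentPath)
        throws KeeperException, InterruptedException {
      if (parentVersion == null) return; // we never registered
      List<Op> ops = new ArrayList<>(2);
      ops.add(Op.check(parentPath, parentVersion));
      ops.add(Op.delete(leaderPath, -1));
      zk.multi(ops);
      parentVersion = null;
    }
  }
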
diff --git a/solr/core/src/java/org/apache/solr/handler/component/PivotFacetProcessor.java b/solr/core/src/java/org/apache/solr/handler/component/PivotFacetProcessor.java
index 011d662..1069c50 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/PivotFacetProcessor.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/PivotFacetProcessor.java
@@ -74,7 +74,7 @@ public class PivotFacetProcessor extends SimpleFacets
     // rb._statsInfo may be null if stats=false, ie: refine requests
     // if that's the case, but we need to refine w/stats, then we'll lazy init our 
     // own instance of StatsInfo
-    StatsInfo statsInfo = rb._statsInfo; 
+    StatsInfo statsInfo = rb._statsInfo;
 
     SimpleOrderedMap<List<NamedList<Object>>> pivotResponse = new SimpleOrderedMap<>();
     for (String pivotList : pivots) {
@@ -237,7 +237,7 @@ public class PivotFacetProcessor extends SimpleFacets
    *
    * @return A list of StatsFields to compute for this pivot, or the empty list if none
    */
-  private static List<StatsField> getTaggedStatsFields(StatsInfo statsInfo, 
+  private static List<StatsField> getTaggedStatsFields(StatsInfo statsInfo,
                                                        String statsLocalParam) {
     if (null == statsLocalParam || null == statsInfo) {
       return Collections.emptyList();
diff --git a/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java b/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java
index fc5c29f..4b80dae 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/StatsComponent.java
@@ -17,15 +17,11 @@
 package org.apache.solr.handler.component;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
 import java.util.LinkedHashMap;
-import java.util.List;
 import java.util.Map;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ShardParams;
-import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.StatsParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
@@ -41,13 +37,13 @@ public class StatsComponent extends SearchComponent {
 
   @Override
   public void prepare(ResponseBuilder rb) throws IOException {
-    if (rb.req.getParams().getBool(StatsParams.STATS,false)) {
-      rb.setNeedDocSet( true );
+    if (rb.req.getParams().getBool(StatsParams.STATS, false)) {
+      rb.setNeedDocSet(true);
       rb.doStats = true;
       rb._statsInfo = new StatsInfo(rb);
       for (StatsField statsField : rb._statsInfo.getStatsFields()) {
         if (statsField.getSchemaField() != null && statsField.getSchemaField().getType().isPointField() && !statsField.getSchemaField().hasDocValues()) {
-          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, 
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
               "Can't calculate stats on a PointField without docValues");
         }
       }
@@ -63,8 +59,8 @@ public class StatsComponent extends SearchComponent {
       DocSet docs = statsField.computeBaseDocSet();
       statsValues.put(statsField.getOutputKey(), statsField.computeLocalStatsValues(docs));
     }
-    
-    rb.rsp.add( "stats", convertToResponse(statsValues) );
+
+    rb.rsp.add("stats", convertToResponse(statsValues));
   }
 
   @Override
@@ -96,8 +92,8 @@ public class StatsComponent extends SearchComponent {
     for (ShardResponse srsp : sreq.responses) {
       NamedList stats = null;
       try {
-        stats = (NamedList<NamedList<NamedList<?>>>) 
-          srsp.getSolrResponse().getResponse().get("stats");
+        stats = (NamedList<NamedList<NamedList<?>>>)
+            srsp.getSolrResponse().getResponse().get("stats");
       } catch (Exception e) {
         if (ShardParams.getShardsTolerantAsBool(rb.req.getParams())) {
           continue; // looks like a shard did not return anything
@@ -141,17 +137,17 @@ public class StatsComponent extends SearchComponent {
 
   /**
    * Given a map of {@link StatsValues} using the appropriate response key,
-   * builds up the necessary "stats" data structure for including in the response -- 
+   * builds up the necessary "stats" data structure for including in the response --
    * including the esoteric "stats_fields" wrapper.
    */
   public static NamedList<NamedList<NamedList<?>>> convertToResponse
-    (Map<String,StatsValues> statsValues) {
+  (Map<String, StatsValues> statsValues) {
 
     NamedList<NamedList<NamedList<?>>> stats = new SimpleOrderedMap<>();
     NamedList<NamedList<?>> stats_fields = new SimpleOrderedMap<>();
     stats.add("stats_fields", stats_fields);
-    
-    for (Map.Entry<String,StatsValues> entry : statsValues.entrySet()) {
+
+    for (Map.Entry<String, StatsValues> entry : statsValues.entrySet()) {
       String key = entry.getKey();
       NamedList stv = entry.getValue().getStatsValues();
       stats_fields.add(key, stv);
@@ -169,87 +165,3 @@ public class StatsComponent extends SearchComponent {
   }
 }
 
-/**
- * Models all of the information about stats needed for a single request
- * @see StatsField
- */
-class StatsInfo {
-
-  private final ResponseBuilder rb;
-  private final List<StatsField> statsFields = new ArrayList<>(7);
-  private final Map<String, StatsValues> distribStatsValues = new LinkedHashMap<>();
-  private final Map<String, StatsField> statsFieldMap = new LinkedHashMap<>();
-  private final Map<String, List<StatsField>> tagToStatsFields = new LinkedHashMap<>();
-
-  public StatsInfo(ResponseBuilder rb) { 
-    this.rb = rb;
-    SolrParams params = rb.req.getParams();
-    String[] statsParams = params.getParams(StatsParams.STATS_FIELD);
-    if (null == statsParams) {
-      // no stats.field params, nothing to parse.
-      return;
-    }
-    
-    for (String paramValue : statsParams) {
-      StatsField current = new StatsField(rb, paramValue);
-      statsFields.add(current);
-      for (String tag : current.getTagList()) {
-        List<StatsField> fieldList = tagToStatsFields.get(tag);
-        if (fieldList == null) {
-          fieldList = new ArrayList<>();
-        }
-        fieldList.add(current);
-        tagToStatsFields.put(tag, fieldList);
-      }
-      statsFieldMap.put(current.getOutputKey(), current);
-      distribStatsValues.put(current.getOutputKey(), 
-                             StatsValuesFactory.createStatsValues(current));
-    }
-  }
-
-  /**
-   * Returns an immutable list of {@link StatsField} instances
-   * modeling each of the {@link StatsParams#STATS_FIELD} params specified
-   * as part of this request
-   */
-  public List<StatsField> getStatsFields() {
-    return Collections.unmodifiableList(statsFields);
-  }
-
-  /**
-   * Returns the {@link StatsField} associated with the specified (effective) 
-   * outputKey, or null if there was no {@link StatsParams#STATS_FIELD} param
-   * that would corrispond with that key.
-   */
-  public StatsField getStatsField(String outputKey) {
-    return statsFieldMap.get(outputKey);
-  }
-
-  /**
-   * Return immutable list of {@link StatsField} instances by string tag local parameter.
-   *
-   * @param tag tag local parameter
-   * @return list of stats fields
-   */
-  public List<StatsField> getStatsFieldsByTag(String tag) {
-    List<StatsField> raw = tagToStatsFields.get(tag);
-    if (null == raw) {
-      return Collections.emptyList();
-    } else {
-      return Collections.unmodifiableList(raw);
-    }
-  }
-
-  /**
-   * Returns an immutable map of response key =&gt; {@link StatsValues}
-   * instances for the current distributed request.  
-   * Depending on where we are in the process of handling this request, 
-   * these {@link StatsValues} instances may not be complete -- but they 
-   * will never be null.
-   */
-  public Map<String, StatsValues> getAggregateStatsValues() {
-    return Collections.unmodifiableMap(distribStatsValues);
-  }
-
-}
-
diff --git a/solr/core/src/java/org/apache/solr/handler/component/StatsInfo.java b/solr/core/src/java/org/apache/solr/handler/component/StatsInfo.java
new file mode 100644
index 0000000..f3f2871
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/component/StatsInfo.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.component;
+
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.params.StatsParams;
+
+import java.util.*;
+
+/**
+ * Models all of the information about stats needed for a single request
+ *
+ * @see StatsField
+ */
+class StatsInfo {
+
+    private final ResponseBuilder rb;
+    private final List<StatsField> statsFields = new ArrayList<>(7);
+    private final Map<String, StatsValues> distribStatsValues = new LinkedHashMap<>();
+    private final Map<String, StatsField> statsFieldMap = new LinkedHashMap<>();
+    private final Map<String, List<StatsField>> tagToStatsFields = new LinkedHashMap<>();
+
+    public StatsInfo(ResponseBuilder rb) {
+        this.rb = rb;
+        SolrParams params = rb.req.getParams();
+        String[] statsParams = params.getParams(StatsParams.STATS_FIELD);
+        if (null == statsParams) {
+            // no stats.field params, nothing to parse.
+            return;
+        }
+
+        for (String paramValue : statsParams) {
+            StatsField current = new StatsField(rb, paramValue);
+            statsFields.add(current);
+            for (String tag : current.getTagList()) {
+                List<StatsField> fieldList = tagToStatsFields.get(tag);
+                if (fieldList == null) {
+                    fieldList = new ArrayList<>();
+                }
+                fieldList.add(current);
+                tagToStatsFields.put(tag, fieldList);
+            }
+            statsFieldMap.put(current.getOutputKey(), current);
+            distribStatsValues.put(current.getOutputKey(),
+                    StatsValuesFactory.createStatsValues(current));
+        }
+    }
+
+    /**
+     * Returns an immutable list of {@link StatsField} instances
+     * modeling each of the {@link StatsParams#STATS_FIELD} params specified
+     * as part of this request
+     */
+    public List<StatsField> getStatsFields() {
+        return Collections.unmodifiableList(statsFields);
+    }
+
+    /**
+     * Returns the {@link StatsField} associated with the specified (effective)
+     * outputKey, or null if there was no {@link StatsParams#STATS_FIELD} param
+     * that would correspond with that key.
+     */
+    public StatsField getStatsField(String outputKey) {
+        return statsFieldMap.get(outputKey);
+    }
+
+    /**
+     * Return immutable list of {@link StatsField} instances by string tag local parameter.
+     *
+     * @param tag tag local parameter
+     * @return list of stats fields
+     */
+    public List<StatsField> getStatsFieldsByTag(String tag) {
+        List<StatsField> raw = tagToStatsFields.get(tag);
+        if (null == raw) {
+            return Collections.emptyList();
+        } else {
+            return Collections.unmodifiableList(raw);
+        }
+    }
+
+    /**
+     * Returns an immutable map of response key =&gt; {@link StatsValues}
+     * instances for the current distributed request.
+     * Depending on where we are in the process of handling this request,
+     * these {@link StatsValues} instances may not be complete -- but they
+     * will never be null.
+     */
+    public Map<String, StatsValues> getAggregateStatsValues() {
+        return Collections.unmodifiableMap(distribStatsValues);
+    }
+
+}
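
StatsInfo's constructor groups the parsed stats.field params by their tag local param into tagToStatsFields. A small self-contained sketch of that grouping shape, with made-up tag/field strings standing in for the Solr types:

  import java.util.ArrayList;
  import java.util.LinkedHashMap;
  import java.util.List;
  import java.util.Map;

  class TagGroupingSketch {
    public static void main(String[] args) {
      // Made-up (tag, outputKey) pairs standing in for parsed stats.field params.
      String[][] parsed = {{"t1", "price"}, {"t1", "weight"}, {"t2", "price"}};

      // Group by tag -- the same shape StatsInfo builds in tagToStatsFields.
      Map<String, List<String>> tagToFields = new LinkedHashMap<>();
      for (String[] pair : parsed) {
        tagToFields.computeIfAbsent(pair[0], k -> new ArrayList<>()).add(pair[1]);
      }
      System.out.println(tagToFields); // {t1=[price, weight], t2=[price]}
    }
  }
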
diff --git a/solr/core/src/java/org/apache/solr/handler/export/DoubleCmp.java b/solr/core/src/java/org/apache/solr/handler/export/DoubleComp.java
similarity index 69%
rename from solr/core/src/java/org/apache/solr/handler/export/DoubleCmp.java
rename to solr/core/src/java/org/apache/solr/handler/export/DoubleComp.java
index 50341fd..6973948 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/DoubleCmp.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/DoubleComp.java
@@ -19,25 +19,27 @@ package org.apache.solr.handler.export;
 
 interface DoubleComp {
   int compare(double a, double b);
+
   double resetValue();
-}
 
-class DoubleAsc implements DoubleComp {
-  public double resetValue() {
-    return Double.MAX_VALUE;
-  }
 
-  public int compare(double a, double b) {
-    return Double.compare(b, a);
-  }
-}
+  static class DoubleAsc implements DoubleComp {
+    public double resetValue() {
+      return Double.MAX_VALUE;
+    }
 
-class DoubleDesc implements DoubleComp {
-  public double resetValue() {
-    return -Double.MAX_VALUE;
+    public int compare(double a, double b) {
+      return Double.compare(b, a);
+    }
   }
 
-  public int compare(double a, double b) {
-    return Double.compare(a, b);
+  static class DoubleDesc implements DoubleComp {
+    public double resetValue() {
+      return -Double.MAX_VALUE;
+    }
+
+    public int compare(double a, double b) {
+      return Double.compare(a, b);
+    }
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java b/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java
index e4d6da0..adacd77 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java
@@ -408,41 +408,41 @@ public class ExportWriter implements SolrCore.RawWriter, Closeable {
 
       if (ft instanceof IntValueFieldType) {
         if (reverse) {
-          sortValues[i] = new IntValue(field, new IntDesc());
+          sortValues[i] = new IntValue(field, new IntComp.IntDesc());
         } else {
-          sortValues[i] = new IntValue(field, new IntAsc());
+          sortValues[i] = new IntValue(field, new IntComp.IntAsc());
         }
       } else if (ft instanceof FloatValueFieldType) {
         if (reverse) {
-          sortValues[i] = new FloatValue(field, new FloatDesc());
+          sortValues[i] = new FloatValue(field, new FloatComp.FloatDesc());
         } else {
-          sortValues[i] = new FloatValue(field, new FloatAsc());
+          sortValues[i] = new FloatValue(field, new FloatComp.FloatAsc());
         }
       } else if (ft instanceof DoubleValueFieldType) {
         if (reverse) {
-          sortValues[i] = new DoubleValue(field, new DoubleDesc());
+          sortValues[i] = new DoubleValue(field, new DoubleComp.DoubleDesc());
         } else {
-          sortValues[i] = new DoubleValue(field, new DoubleAsc());
+          sortValues[i] = new DoubleValue(field, new DoubleComp.DoubleAsc());
         }
       } else if (ft instanceof LongValueFieldType) {
         if (reverse) {
-          sortValues[i] = new LongValue(field, new LongDesc());
+          sortValues[i] = new LongValue(field, new LongComp.LongDesc());
         } else {
-          sortValues[i] = new LongValue(field, new LongAsc());
+          sortValues[i] = new LongValue(field, new LongComp.LongAsc());
         }
       } else if (ft instanceof StrField || ft instanceof SortableTextField) {
         LeafReader reader = searcher.getSlowAtomicReader();
         SortedDocValues vals = reader.getSortedDocValues(field);
         if (reverse) {
-          sortValues[i] = new StringValue(vals, field, new IntDesc());
+          sortValues[i] = new StringValue(vals, field, new IntComp.IntDesc());
         } else {
-          sortValues[i] = new StringValue(vals, field, new IntAsc());
+          sortValues[i] = new StringValue(vals, field, new IntComp.IntAsc());
         }
       } else if (ft instanceof DateValueFieldType) {
         if (reverse) {
-          sortValues[i] = new LongValue(field, new LongDesc());
+          sortValues[i] = new LongValue(field, new LongComp.LongDesc());
         } else {
-          sortValues[i] = new LongValue(field, new LongAsc());
+          sortValues[i] = new LongValue(field, new LongComp.LongAsc());
         }
       } else if (ft instanceof BoolField) {
         // This is a bit of a hack, but since the boolean field stores ByteRefs, just like Strings
@@ -451,9 +451,9 @@ public class ExportWriter implements SolrCore.RawWriter, Closeable {
         LeafReader reader = searcher.getSlowAtomicReader();
         SortedDocValues vals = reader.getSortedDocValues(field);
         if (reverse) {
-          sortValues[i] = new StringValue(vals, field, new IntDesc());
+          sortValues[i] = new StringValue(vals, field, new IntComp.IntDesc());
         } else {
-          sortValues[i] = new StringValue(vals, field, new IntAsc());
+          sortValues[i] = new StringValue(vals, field, new IntComp.IntAsc());
         }
       } else {
         throw new IOException("Sort fields must be one of the following types: int,float,long,double,string,date,boolean,SortableText");
diff --git a/solr/core/src/java/org/apache/solr/handler/export/FloatCmp.java b/solr/core/src/java/org/apache/solr/handler/export/FloatComp.java
similarity index 70%
rename from solr/core/src/java/org/apache/solr/handler/export/FloatCmp.java
rename to solr/core/src/java/org/apache/solr/handler/export/FloatComp.java
index 7ef078c..1ce6e57 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/FloatCmp.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/FloatComp.java
@@ -19,26 +19,26 @@ package org.apache.solr.handler.export;
 
 interface FloatComp {
   int compare(float a, float b);
+
   float resetValue();
-}
 
-class FloatAsc implements FloatComp {
-  public float resetValue() {
-    return Float.MAX_VALUE;
-  }
+  static class FloatAsc implements FloatComp {
+    public float resetValue() {
+      return Float.MAX_VALUE;
+    }
 
-  public int compare(float a, float b) {
-    return Float.compare(b, a);
+    public int compare(float a, float b) {
+      return Float.compare(b, a);
+    }
   }
-}
 
-class FloatDesc implements FloatComp {
-  public float resetValue() {
-    return -Float.MAX_VALUE;
-  }
+  static class FloatDesc implements FloatComp {
+    public float resetValue() {
+      return -Float.MAX_VALUE;
+    }
 
-  public int compare(float a, float b) {
-    return Float.compare(a, b);
+    public int compare(float a, float b) {
+      return Float.compare(a, b);
+    }
   }
 }
-
diff --git a/solr/core/src/java/org/apache/solr/handler/export/IntComp.java b/solr/core/src/java/org/apache/solr/handler/export/IntComp.java
index ac83d5d..b44ebc8 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/IntComp.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/IntComp.java
@@ -19,27 +19,29 @@ package org.apache.solr.handler.export;
 
 public interface IntComp {
   int compare(int a, int b);
+
   int resetValue();
-}
 
-class IntAsc implements IntComp {
 
-  public int resetValue() {
-    return Integer.MAX_VALUE;
-  }
+  static class IntAsc implements IntComp {
 
-  public int compare(int a, int b) {
-    return Integer.compare(b, a);
+    public int resetValue() {
+      return Integer.MAX_VALUE;
+    }
+
+    public int compare(int a, int b) {
+      return Integer.compare(b, a);
+    }
   }
-}
 
-class IntDesc implements IntComp {
+  static class IntDesc implements IntComp {
 
-  public int resetValue() {
-    return Integer.MIN_VALUE;
-  }
+    public int resetValue() {
+      return Integer.MIN_VALUE;
+    }
 
-  public int compare(int a, int b) {
-    return Integer.compare(a, b);
+    public int compare(int a, int b) {
+      return Integer.compare(a, b);
+    }
   }
-}
+}
\ No newline at end of file
diff --git a/solr/core/src/java/org/apache/solr/handler/export/LongCmp.java b/solr/core/src/java/org/apache/solr/handler/export/LongComp.java
similarity index 70%
rename from solr/core/src/java/org/apache/solr/handler/export/LongCmp.java
rename to solr/core/src/java/org/apache/solr/handler/export/LongComp.java
index 7d997ac..45a522c 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/LongCmp.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/LongComp.java
@@ -19,27 +19,28 @@ package org.apache.solr.handler.export;
 
 interface LongComp {
   int compare(long a, long b);
+
   long resetValue();
-}
 
-class LongAsc implements LongComp {
+  static class LongAsc implements LongComp {
 
-  public long resetValue() {
-    return Long.MAX_VALUE;
-  }
+    public long resetValue() {
+      return Long.MAX_VALUE;
+    }
 
-  public int compare(long a, long b) {
-    return Long.compare(b, a);
+    public int compare(long a, long b) {
+      return Long.compare(b, a);
+    }
   }
-}
 
-class LongDesc implements LongComp {
+  static class LongDesc implements LongComp {
 
-  public long resetValue() {
-    return Long.MIN_VALUE;
-  }
+    public long resetValue() {
+      return Long.MIN_VALUE;
+    }
 
-  public int compare(long a, long b) {
-    return Long.compare(a, b);
+    public int compare(long a, long b) {
+      return Long.compare(a, b);
+    }
   }
-}
+}
\ No newline at end of file
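
The DoubleComp, FloatComp, IntComp and LongComp changes all apply the same refactor: the former top-level package-private Asc/Desc comparators become nested classes of their interface, so ExportWriter now selects, for example, IntComp.IntAsc or IntComp.IntDesc by sort direction. A minimal standalone sketch of that shape (names are illustrative, not the Solr classes):

  class NestedComparatorSketch {

    interface LongCompSketch {
      int compare(long a, long b);
      long resetValue();

      // Nested implementations replace the former top-level package-private Asc/Desc classes.
      class Asc implements LongCompSketch {
        public long resetValue() { return Long.MAX_VALUE; }
        public int compare(long a, long b) { return Long.compare(b, a); }
      }

      class Desc implements LongCompSketch {
        public long resetValue() { return Long.MIN_VALUE; }
        public int compare(long a, long b) { return Long.compare(a, b); }
      }
    }

    public static void main(String[] args) {
      boolean reverse = false;
      // Mirrors ExportWriter's per-field setup: pick the nested comparator by direction.
      LongCompSketch comp = reverse ? new LongCompSketch.Desc() : new LongCompSketch.Asc();
      System.out.println(comp.resetValue()); // prints Long.MAX_VALUE for the ascending variant
    }
  }
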
diff --git a/solr/core/src/java/org/apache/solr/response/transform/ShardAugmenterFactory.java b/solr/core/src/java/org/apache/solr/response/transform/ShardAugmenterFactory.java
index e65bb93..e01ba28 100644
--- a/solr/core/src/java/org/apache/solr/response/transform/ShardAugmenterFactory.java
+++ b/solr/core/src/java/org/apache/solr/response/transform/ShardAugmenterFactory.java
@@ -38,7 +38,7 @@ public class ShardAugmenterFactory extends TransformerFactory
         v = "[not a shard request]";
       }
     }
-    return new ValueAugmenter( field, v );
+    return new ValueAugmenterFactory.ValueAugmenter( field, v );
   }
 }
 
diff --git a/solr/core/src/java/org/apache/solr/response/transform/ValueAugmenterFactory.java b/solr/core/src/java/org/apache/solr/response/transform/ValueAugmenterFactory.java
index d85a302..178fae1 100644
--- a/solr/core/src/java/org/apache/solr/response/transform/ValueAugmenterFactory.java
+++ b/solr/core/src/java/org/apache/solr/response/transform/ValueAugmenterFactory.java
@@ -28,31 +28,28 @@ import org.apache.solr.util.DateMathParser;
  *
  * @since solr 4.0
  */
-public class ValueAugmenterFactory extends TransformerFactory
-{
+public class ValueAugmenterFactory extends TransformerFactory {
   protected Object value = null;
   protected Object defaultValue = null;
 
   @Override
   public void init(NamedList args) {
-    value = args.get( "value" );
-    if( value == null ) {
-      defaultValue = args.get( "defaultValue" );
+    value = args.get("value");
+    if (value == null) {
+      defaultValue = args.get("defaultValue");
     }
   }
 
-  public static Object getObjectFrom( String val, String type )
-  {
-    if( type != null ) {
+  public static Object getObjectFrom(String val, String type) {
+    if (type != null) {
       try {
-        if( "int".equals( type ) ) return Integer.valueOf( val );
-        if( "double".equals( type ) ) return Double.valueOf( val );
-        if( "float".equals( type ) ) return Float.valueOf( val );
-        if( "date".equals( type ) ) return DateMathParser.parseMath(null, val );
-      }
-      catch( Exception ex ) {
-        throw new SolrException( ErrorCode.BAD_REQUEST,
-            "Unable to parse "+type+"="+val, ex );
+        if ("int".equals(type)) return Integer.valueOf(val);
+        if ("double".equals(type)) return Double.valueOf(val);
+        if ("float".equals(type)) return Float.valueOf(val);
+        if ("date".equals(type)) return DateMathParser.parseMath(null, val);
+      } catch (Exception ex) {
+        throw new SolrException(ErrorCode.BAD_REQUEST,
+                "Unable to parse " + type + "=" + val, ex);
       }
     }
     return val;
@@ -61,43 +58,40 @@ public class ValueAugmenterFactory extends TransformerFactory
   @Override
   public DocTransformer create(String field, SolrParams params, SolrQueryRequest req) {
     Object val = value;
-    if( val == null ) {
+    if (val == null) {
       String v = params.get("v");
-      if( v == null ) {
+      if (v == null) {
         val = defaultValue;
-      }
-      else {
+      } else {
         val = getObjectFrom(v, params.get("t"));
       }
-      if( val == null ) {
-        throw new SolrException( ErrorCode.BAD_REQUEST,
-            "ValueAugmenter is missing a value -- should be defined in solrconfig or inline" );
+      if (val == null) {
+        throw new SolrException(ErrorCode.BAD_REQUEST,
+                "ValueAugmenter is missing a value -- should be defined in solrconfig or inline");
       }
     }
-    return new ValueAugmenter( field, val );
+    return new ValueAugmenter(field, val);
   }
-}
 
-class ValueAugmenter extends DocTransformer
-{
-  final String name;
-  final Object value;
 
-  public ValueAugmenter( String name, Object value )
-  {
-    this.name = name;
-    this.value = value;
-  }
+  static class ValueAugmenter extends DocTransformer {
+    final String name;
+    final Object value;
 
-  @Override
-  public String getName()
-  {
-    return name;
-  }
+    public ValueAugmenter(String name, Object value) {
+      this.name = name;
+      this.value = value;
+    }
 
-  @Override
-  public void transform(SolrDocument doc, int docid) {
-    doc.setField( name, value );
+    @Override
+    public String getName() {
+      return name;
+    }
+
+    @Override
+    public void transform(SolrDocument doc, int docid) {
+      doc.setField(name, value);
+    }
   }
 }
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetParser.java b/solr/core/src/java/org/apache/solr/search/facet/FacetParser.java
index 308228b..d8bb697 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetParser.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetParser.java
@@ -16,18 +16,22 @@
  */
 package org.apache.solr.search.facet;
 
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.Optional;
+
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.search.FunctionQParser;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.schema.IndexSchema;
-import org.apache.solr.search.FunctionQParser;
+import org.apache.solr.search.QParser;
 import org.apache.solr.search.SyntaxError;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
+import static org.apache.solr.common.params.CommonParams.SORT;
 
 abstract class FacetParser<FacetRequestT extends FacetRequest> {
   protected FacetRequestT facet;
@@ -134,9 +138,9 @@ abstract class FacetParser<FacetRequestT extends FacetRequest> {
     switch (type) {
       case "field":
       case "terms":
-        return new FacetRequest.FacetFieldParser(this, key).parse(args);
+        return new FacetFieldParser(this, key).parse(args);
       case "query":
-        return new FacetRequest.FacetQueryParser(this, key).parse(args);
+        return new FacetQueryParser(this, key).parse(args);
       case "range":
         return new FacetRangeParser(this, key).parse(args);
       case "heatmap":
@@ -411,4 +415,223 @@ abstract class FacetParser<FacetRequestT extends FacetRequest> {
     nl.addAll(jsonObject);
     return SolrParams.toSolrParams(nl);
   }
+
+  // TODO Make this private (or at least not static) and introduce
+  // a newInstance method on FacetParser that returns one of these?
+  static class FacetTopParser extends FacetParser<FacetQuery> {
+    private SolrQueryRequest req;
+
+    public FacetTopParser(SolrQueryRequest req) {
+      super(null, "facet");
+      this.facet = new FacetQuery();
+      this.req = req;
+    }
+
+    @Override
+    public FacetQuery parse(Object args) throws SyntaxError {
+      parseSubs(args);
+      return facet;
+    }
+
+    @Override
+    public SolrQueryRequest getSolrRequest() {
+      return req;
+    }
+
+    @Override
+    public IndexSchema getSchema() {
+      return req.getSchema();
+    }
+  }
+
+  static class FacetQueryParser extends FacetParser<FacetQuery> {
+    public FacetQueryParser(@SuppressWarnings("rawtypes") FacetParser parent, String key) {
+      super(parent, key);
+      facet = new FacetQuery();
+    }
+
+    @Override
+    public FacetQuery parse(Object arg) throws SyntaxError {
+      parseCommonParams(arg);
+
+      String qstring = null;
+      if (arg instanceof String) {
+        // just the field name...
+        qstring = (String)arg;
+
+      } else if (arg instanceof Map) {
+        @SuppressWarnings({"unchecked"})
+        Map<String, Object> m = (Map<String, Object>) arg;
+        qstring = getString(m, "q", null);
+        if (qstring == null) {
+          qstring = getString(m, "query", null);
+        }
+
+        // OK to parse subs before we have parsed our own query?
+        // as long as subs don't need to know about it.
+        parseSubs( m.get("facet") );
+      } else if (arg != null) {
+        // something like json.facet.facet.query=2
+        throw err("Expected string/map for facet query, received " + arg.getClass().getSimpleName() + "=" + arg);
+      }
+
+      // TODO: substats that are from defaults!!!
+
+      if (qstring != null) {
+        QParser parser = QParser.getParser(qstring, getSolrRequest());
+        parser.setIsFilter(true);
+        facet.q = parser.getQuery();
+      }
+
+      return facet;
+    }
+  }
+
+  /*** not a separate type of parser for now...
+   static class FacetBlockParentParser extends FacetParser<FacetBlockParent> {
+   public FacetBlockParentParser(FacetParser parent, String key) {
+   super(parent, key);
+   facet = new FacetBlockParent();
+   }
+
+   @Override
+   public FacetBlockParent parse(Object arg) throws SyntaxError {
+   parseCommonParams(arg);
+
+   if (arg instanceof String) {
+   // just the field name...
+   facet.parents = (String)arg;
+
+   } else if (arg instanceof Map) {
+   Map<String, Object> m = (Map<String, Object>) arg;
+   facet.parents = getString(m, "parents", null);
+
+   parseSubs( m.get("facet") );
+   }
+
+   return facet;
+   }
+   }
+   ***/
+
+  static class FacetFieldParser extends FacetParser<FacetField> {
+    @SuppressWarnings({"rawtypes"})
+    public FacetFieldParser(FacetParser parent, String key) {
+      super(parent, key);
+      facet = new FacetField();
+    }
+
+    public FacetField parse(Object arg) throws SyntaxError {
+      parseCommonParams(arg);
+      if (arg instanceof String) {
+        // just the field name...
+        facet.field = (String)arg;
+
+      } else if (arg instanceof Map) {
+        @SuppressWarnings({"unchecked"})
+        Map<String, Object> m = (Map<String, Object>) arg;
+        facet.field = getField(m);
+        facet.offset = getLong(m, "offset", facet.offset);
+        facet.limit = getLong(m, "limit", facet.limit);
+        facet.overrequest = (int) getLong(m, "overrequest", facet.overrequest);
+        facet.overrefine = (int) getLong(m, "overrefine", facet.overrefine);
+        if (facet.limit == 0) facet.offset = 0;  // normalize.  an offset with a limit of zero isn't useful.
+        facet.mincount = getLong(m, "mincount", facet.mincount);
+        facet.missing = getBoolean(m, "missing", facet.missing);
+        facet.numBuckets = getBoolean(m, "numBuckets", facet.numBuckets);
+        facet.prefix = getString(m, "prefix", facet.prefix);
+        facet.allBuckets = getBoolean(m, "allBuckets", facet.allBuckets);
+        facet.method = FacetField.FacetMethod.fromString(getString(m, "method", null));
+        facet.cacheDf = (int)getLong(m, "cacheDf", facet.cacheDf);
+
+        // TODO: pull up to higher level?
+        facet.refine = FacetRequest.RefineMethod.fromObj(m.get("refine"));
+
+        facet.perSeg = getBooleanOrNull(m, "perSeg");
+
+        // facet.sort may depend on a facet stat...
+        // should we be parsing / validating this here, or in the execution environment?
+        Object o = m.get("facet");
+        parseSubs(o);
+
+        facet.sort = parseAndValidateSort(facet, m, SORT);
+        facet.prelim_sort = parseAndValidateSort(facet, m, "prelim_sort");
+      } else if (arg != null) {
+        // something like json.facet.facet.field=2
+        throw err("Expected string/map for facet field, received " + arg.getClass().getSimpleName() + "=" + arg);
+      }
+
+      if (null == facet.sort) {
+        facet.sort = FacetRequest.FacetSort.COUNT_DESC;
+      }
+
+      return facet;
+    }
+
+    /**
+     * Parses, validates and returns the {@link FacetRequest.FacetSort} for the given sortParam
+     * and facet field
+     * <p>
+     *   Currently, supported sort specifications are 'mystat desc' OR {mystat: 'desc'}
+     *   index - This is equivalent to 'index asc'
+     *   count - This is equivalent to 'count desc'
+     * </p>
+     *
+     * @param facet {@link FacetField} for which sort needs to be parsed and validated
+     * @param args map containing the sortVal for the given sortParam
+     * @param sortParam parameter for which the sort needs to be parsed and validated
+     * @return parsed facet sort
+     */
+    private static FacetRequest.FacetSort parseAndValidateSort(FacetField facet, Map<String, Object> args, String sortParam) {
+      Object sort = args.get(sortParam);
+      if (sort == null) {
+        return null;
+      }
+
+      FacetRequest.FacetSort facetSort = null;
+
+      if (sort instanceof String) {
+        String sortStr = (String)sort;
+        if (sortStr.endsWith(" asc")) {
+          facetSort =  new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" asc".length()),
+                  FacetRequest.SortDirection.asc);
+        } else if (sortStr.endsWith(" desc")) {
+          facetSort =  new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" desc".length()),
+                  FacetRequest.SortDirection.desc);
+        } else {
+          facetSort =  new FacetRequest.FacetSort(sortStr,
+                  // default direction for "index" is ascending
+                  ("index".equals(sortStr)
+                          ? FacetRequest.SortDirection.asc
+                          : FacetRequest.SortDirection.desc));
+        }
+      } else if (sort instanceof Map) {
+        // { myvar : 'desc' }
+        @SuppressWarnings("unchecked")
+        Optional<Map.Entry<String,Object>> optional = ((Map<String,Object>)sort).entrySet().stream().findFirst();
+        if (optional.isPresent()) {
+          Map.Entry<String, Object> entry = optional.get();
+          facetSort = new FacetRequest.FacetSort(entry.getKey(), FacetRequest.SortDirection.fromObj(entry.getValue()));
+        }
+      } else {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+                "Expected string/map for '" + sortParam +"', received "+ sort.getClass().getSimpleName() + "=" + sort);
+      }
+
+      Map<String, AggValueSource> facetStats = facet.facetStats;
+      // validate facet sort
+      boolean isValidSort = facetSort == null ||
+              "index".equals(facetSort.sortVariable) ||
+              "count".equals(facetSort.sortVariable) ||
+              (facetStats != null && facetStats.containsKey(facetSort.sortVariable));
+
+      if (!isValidSort) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+                "Invalid " + sortParam + " option '" + sort + "' for field '" + facet.field + "'");
+      }
+      return facetSort;
+    }
+
+  }
+
 }
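
A minimal, self-contained sketch (not part of the patch above; the class and method names are
made up for illustration) of the two sort forms described in the parseAndValidateSort javadoc:
a string such as "mystat desc" (a bare "index" defaults to ascending, any other bare variable
such as "count" to descending), or a single-entry map such as {mystat: 'desc'}.

    import java.util.Map;

    public class FacetSortSketch {
      enum Direction { asc, desc }

      // Mirrors the string/map handling above: "<stat> asc", "<stat> desc", a bare variable,
      // or a single-entry map such as {mystat: "desc"}.
      static Map.Entry<String, Direction> parse(Object sort) {
        if (sort instanceof String) {
          String s = (String) sort;
          if (s.endsWith(" asc")) {
            return Map.entry(s.substring(0, s.length() - " asc".length()), Direction.asc);
          } else if (s.endsWith(" desc")) {
            return Map.entry(s.substring(0, s.length() - " desc".length()), Direction.desc);
          }
          // bare variable: "index" defaults to ascending, everything else (e.g. "count") to descending
          return Map.entry(s, "index".equals(s) ? Direction.asc : Direction.desc);
        }
        if (sort instanceof Map) {
          Map.Entry<?, ?> e = ((Map<?, ?>) sort).entrySet().iterator().next();
          return Map.entry(e.getKey().toString(), Direction.valueOf(e.getValue().toString()));
        }
        throw new IllegalArgumentException("Expected string or map, got " + sort);
      }

      public static void main(String[] args) {
        System.out.println(parse("mystat desc"));             // mystat=desc
        System.out.println(parse("index"));                   // index=asc
        System.out.println(parse(Map.of("mystat", "desc")));  // mystat=desc
      }
    }
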
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
index 42f8488..db9d9c9 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
@@ -21,16 +21,13 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-import java.util.Optional;
 
 import org.apache.lucene.search.Query;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.search.DocSet;
 import org.apache.solr.search.JoinQParserPlugin;
-import org.apache.solr.search.QParser;
 import org.apache.solr.search.QueryContext;
 import org.apache.solr.search.SolrConstantScoreQuery;
 import org.apache.solr.search.SyntaxError;
@@ -38,7 +35,6 @@ import org.apache.solr.search.join.GraphQuery;
 import org.apache.solr.search.join.GraphQueryParser;
 import org.apache.solr.util.RTimer;
 
-import static org.apache.solr.common.params.CommonParams.SORT;
 import static org.apache.solr.search.facet.FacetRequest.RefineMethod.NONE;
 
 /**
@@ -302,7 +298,7 @@ public abstract class FacetRequest {
    */
   public static FacetRequest parse(SolrQueryRequest req, Map<String, Object> params) {
     @SuppressWarnings({"rawtypes"})
-    FacetParser parser = new FacetTopParser(req);
+    FacetParser parser = new FacetParser.FacetTopParser(req);
     try {
       return parser.parse(params);
     } catch (SyntaxError syntaxError) {
@@ -321,7 +317,7 @@ public abstract class FacetRequest {
    */
   public static FacetRequest parseOneFacetReq(SolrQueryRequest req, Map<String, Object> params) {
     @SuppressWarnings("rawtypes")
-    FacetParser parser = new FacetTopParser(req);
+    FacetParser parser = new FacetParser.FacetTopParser(req);
     try {
       return (FacetRequest) parser.parseFacetOrStat("", params);
     } catch (SyntaxError syntaxError) {
@@ -437,221 +433,6 @@ public abstract class FacetRequest {
   
   public abstract Map<String, Object> getFacetDescription();
 
-  static class FacetTopParser extends FacetParser<FacetQuery> {
-    private SolrQueryRequest req;
-
-    public FacetTopParser(SolrQueryRequest req) {
-      super(null, "facet");
-      this.facet = new FacetQuery();
-      this.req = req;
-    }
-
-    @Override
-    public FacetQuery parse(Object args) throws SyntaxError {
-      parseSubs(args);
-      return facet;
-    }
-
-    @Override
-    public SolrQueryRequest getSolrRequest() {
-      return req;
-    }
-
-    @Override
-    public IndexSchema getSchema() {
-      return req.getSchema();
-    }
-  }
-
-  static class FacetQueryParser extends FacetParser<FacetQuery> {
-    public FacetQueryParser(@SuppressWarnings("rawtypes") FacetParser parent, String key) {
-      super(parent, key);
-      facet = new FacetQuery();
-    }
-
-    @Override
-    public FacetQuery parse(Object arg) throws SyntaxError {
-      parseCommonParams(arg);
-
-      String qstring = null;
-      if (arg instanceof String) {
-        // just the field name...
-        qstring = (String)arg;
-
-      } else if (arg instanceof Map) {
-        @SuppressWarnings({"unchecked"})
-        Map<String, Object> m = (Map<String, Object>) arg;
-        qstring = getString(m, "q", null);
-        if (qstring == null) {
-          qstring = getString(m, "query", null);
-        }
-
-        // OK to parse subs before we have parsed our own query?
-        // as long as subs don't need to know about it.
-        parseSubs( m.get("facet") );
-      } else if (arg != null) {
-        // something lke json.facet.facet.query=2
-        throw err("Expected string/map for facet query, received " + arg.getClass().getSimpleName() + "=" + arg);
-      }
-
-      // TODO: substats that are from defaults!!!
-
-      if (qstring != null) {
-        QParser parser = QParser.getParser(qstring, getSolrRequest());
-        parser.setIsFilter(true);
-        facet.q = parser.getQuery();
-      }
-
-      return facet;
-    }
-  }
-
-/*** not a separate type of parser for now...
-static class FacetBlockParentParser extends FacetParser<FacetBlockParent> {
- public FacetBlockParentParser(FacetParser parent, String key) {
- super(parent, key);
- facet = new FacetBlockParent();
- }
-
- @Override
- public FacetBlockParent parse(Object arg) throws SyntaxError {
- parseCommonParams(arg);
-
- if (arg instanceof String) {
- // just the field name...
- facet.parents = (String)arg;
-
- } else if (arg instanceof Map) {
- Map<String, Object> m = (Map<String, Object>) arg;
- facet.parents = getString(m, "parents", null);
-
- parseSubs( m.get("facet") );
- }
-
- return facet;
- }
- }
- ***/
-
-  static class FacetFieldParser extends FacetParser<FacetField> {
-    @SuppressWarnings({"rawtypes"})
-    public FacetFieldParser(FacetParser parent, String key) {
-      super(parent, key);
-      facet = new FacetField();
-    }
-
-    public FacetField parse(Object arg) throws SyntaxError {
-      parseCommonParams(arg);
-      if (arg instanceof String) {
-        // just the field name...
-        facet.field = (String)arg;
-
-      } else if (arg instanceof Map) {
-        @SuppressWarnings({"unchecked"})
-        Map<String, Object> m = (Map<String, Object>) arg;
-        facet.field = getField(m);
-        facet.offset = getLong(m, "offset", facet.offset);
-        facet.limit = getLong(m, "limit", facet.limit);
-        facet.overrequest = (int) getLong(m, "overrequest", facet.overrequest);
-        facet.overrefine = (int) getLong(m, "overrefine", facet.overrefine);
-        if (facet.limit == 0) facet.offset = 0;  // normalize.  an offset with a limit of non-zero isn't useful.
-        facet.mincount = getLong(m, "mincount", facet.mincount);
-        facet.missing = getBoolean(m, "missing", facet.missing);
-        facet.numBuckets = getBoolean(m, "numBuckets", facet.numBuckets);
-        facet.prefix = getString(m, "prefix", facet.prefix);
-        facet.allBuckets = getBoolean(m, "allBuckets", facet.allBuckets);
-        facet.method = FacetField.FacetMethod.fromString(getString(m, "method", null));
-        facet.cacheDf = (int)getLong(m, "cacheDf", facet.cacheDf);
-
-        // TODO: pull up to higher level?
-        facet.refine = RefineMethod.fromObj(m.get("refine"));
-
-        facet.perSeg = getBooleanOrNull(m, "perSeg");
-
-        // facet.sort may depend on a facet stat...
-        // should we be parsing / validating this here, or in the execution environment?
-        Object o = m.get("facet");
-        parseSubs(o);
-
-        facet.sort = parseAndValidateSort(facet, m, SORT);
-        facet.prelim_sort = parseAndValidateSort(facet, m, "prelim_sort");
-      } else if (arg != null) {
-        // something like json.facet.facet.field=2
-        throw err("Expected string/map for facet field, received " + arg.getClass().getSimpleName() + "=" + arg);
-      }
-
-      if (null == facet.sort) {
-        facet.sort = FacetSort.COUNT_DESC;
-      }
-
-      return facet;
-    }
-
-    /**
-     * Parses, validates and returns the {@link FacetSort} for given sortParam
-     * and facet field
-     * <p>
-     *   Currently, supported sort specifications are 'mystat desc' OR {mystat: 'desc'}
-     *   index - This is equivalent to 'index asc'
-     *   count - This is equivalent to 'count desc'
-     * </p>
-     *
-     * @param facet {@link FacetField} for which sort needs to be parsed and validated
-     * @param args map containing the sortVal for given sortParam
-     * @param sortParam parameter for which sort needs to parsed and validated
-     * @return parsed facet sort
-     */
-    private static FacetSort parseAndValidateSort(FacetField facet, Map<String, Object> args, String sortParam) {
-      Object sort = args.get(sortParam);
-      if (sort == null) {
-        return null;
-      }
-
-      FacetSort facetSort = null;
-
-      if (sort instanceof String) {
-        String sortStr = (String)sort;
-        if (sortStr.endsWith(" asc")) {
-          facetSort =  new FacetSort(sortStr.substring(0, sortStr.length()-" asc".length()),
-              SortDirection.asc);
-        } else if (sortStr.endsWith(" desc")) {
-          facetSort =  new FacetSort(sortStr.substring(0, sortStr.length()-" desc".length()),
-              SortDirection.desc);
-        } else {
-          facetSort =  new FacetSort(sortStr,
-              // default direction for "index" is ascending
-              ("index".equals(sortStr)
-                  ? SortDirection.asc
-                  : SortDirection.desc));
-        }
-      } else if (sort instanceof Map) {
-        // { myvar : 'desc' }
-        @SuppressWarnings("unchecked")
-        Optional<Map.Entry<String,Object>> optional = ((Map<String,Object>)sort).entrySet().stream().findFirst();
-        if (optional.isPresent()) {
-          Map.Entry<String, Object> entry = optional.get();
-          facetSort = new FacetSort(entry.getKey(), SortDirection.fromObj(entry.getValue()));
-        }
-      } else {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-            "Expected string/map for '" + sortParam +"', received "+ sort.getClass().getSimpleName() + "=" + sort);
-      }
-
-      Map<String, AggValueSource> facetStats = facet.facetStats;
-      // validate facet sort
-      boolean isValidSort = facetSort == null ||
-          "index".equals(facetSort.sortVariable) ||
-          "count".equals(facetSort.sortVariable) ||
-          (facetStats != null && facetStats.containsKey(facetSort.sortVariable));
-
-      if (!isValidSort) {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-            "Invalid " + sortParam + " option '" + sort + "' for field '" + facet.field + "'");
-      }
-      return facetSort;
-    }
-
-  }
 
 }
 
diff --git a/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java b/solr/core/src/java/org/apache/solr/search/join/GraphEdgeCollector.java
similarity index 59%
rename from solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java
rename to solr/core/src/java/org/apache/solr/search/join/GraphEdgeCollector.java
index 6ca02d3..02ed123 100644
--- a/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java
+++ b/solr/core/src/java/org/apache/solr/search/join/GraphEdgeCollector.java
@@ -53,7 +53,7 @@ abstract class GraphEdgeCollector extends SimpleCollector implements Collector {
   // known leaf nodes
   DocSet leafNodes;
 
-  int numHits=0;    // number of documents visited
+  int numHits = 0;    // number of documents visited
   BitSet bits;  // if not null, used to collect documents visited
 
   int base;
@@ -74,8 +74,10 @@ abstract class GraphEdgeCollector extends SimpleCollector implements Collector {
   }
 
   // the number of docs visited
-  public int getNumHits() { return numHits; }
-  
+  public int getNumHits() {
+    return numHits;
+  }
+
   public void collect(int segDoc) throws IOException {
     int doc = segDoc + base;
     if (skipSet != null && skipSet.exists(doc)) {
@@ -91,19 +93,19 @@ abstract class GraphEdgeCollector extends SimpleCollector implements Collector {
     // Optimization to not look up edges for a document that is a leaf node (i.e. has no outgoing edges)
     if (leafNodes == null || !leafNodes.exists(doc)) {
       addEdgeIdsToResult(segDoc);
-    } 
+    }
     // Note: tracking links in for each result would be a huge memory hog... so not implementing at this time.
   }
-  
+
   abstract void addEdgeIdsToResult(int doc) throws IOException;
-  
+
   private void addDocToResult(int docWithBase) {
     // this document is part of the traversal. mark it in our bitmap.
     bits.set(docWithBase);
     // increment the hit count so we know how many docs we traversed this time.
     numHits++;
   }
-  
+
   @Override
   public void doSetNextReader(LeafReaderContext context) throws IOException {
     base = context.docBase;
@@ -115,87 +117,90 @@ abstract class GraphEdgeCollector extends SimpleCollector implements Collector {
   public ScoreMode scoreMode() {
     return ScoreMode.COMPLETE_NO_SCORES;
   }
-  
-}
 
-class GraphTermsCollector extends GraphEdgeCollector {
-  // all the collected terms
-  private BytesRefHash collectorTerms;
-  private SortedSetDocValues docTermOrds;
 
+  static class GraphTermsCollector extends GraphEdgeCollector {
+    // all the collected terms
+    private BytesRefHash collectorTerms;
+    private SortedSetDocValues docTermOrds;
 
-  GraphTermsCollector(SchemaField collectField, DocSet skipSet, DocSet leafNodes) {
-    super(collectField, skipSet, leafNodes);
-    this.collectorTerms =  new BytesRefHash();
-  }
 
-  @Override
-  public void doSetNextReader(LeafReaderContext context) throws IOException {
-    super.doSetNextReader(context);
-    // Grab the updated doc values.
-    docTermOrds = DocValues.getSortedSet(context.reader(), collectField.getName());
-  }
+    GraphTermsCollector(SchemaField collectField, DocSet skipSet, DocSet leafNodes) {
+      super(collectField, skipSet, leafNodes);
+      this.collectorTerms = new BytesRefHash();
+    }
 
-  @Override
-  void addEdgeIdsToResult(int doc) throws IOException {
-    // set the doc to pull the edges ids for.
-    if (doc > docTermOrds.docID()) {
-      docTermOrds.advance(doc);
+    @Override
+    public void doSetNextReader(LeafReaderContext context) throws IOException {
+      super.doSetNextReader(context);
+      // Grab the updated doc values.
+      docTermOrds = DocValues.getSortedSet(context.reader(), collectField.getName());
     }
-    if (doc == docTermOrds.docID()) {
-      BytesRef edgeValue = new BytesRef();
-      long ord;
-      while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
-        edgeValue = docTermOrds.lookupOrd(ord);
-        // add the edge id to the collector terms.
-        collectorTerms.add(edgeValue);
+
+    @Override
+    void addEdgeIdsToResult(int doc) throws IOException {
+      // set the doc to pull the edges ids for.
+      if (doc > docTermOrds.docID()) {
+        docTermOrds.advance(doc);
+      }
+      if (doc == docTermOrds.docID()) {
+        BytesRef edgeValue = new BytesRef();
+        long ord;
+        while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
+          edgeValue = docTermOrds.lookupOrd(ord);
+          // add the edge id to the collector terms.
+          collectorTerms.add(edgeValue);
+        }
       }
     }
-  }
 
-  @Override
-  public Query getResultQuery(SchemaField matchField, boolean useAutomaton) {
-    if (collectorTerms == null || collectorTerms.size() == 0) {
-      // return null if there are no terms (edges) to traverse.
-      return null;
-    } else {
-      // Create a query
-      Query q = null;
-
-      // TODO: see if we should dynamically select this based on the frontier size.
-      if (useAutomaton) {
-        // build an automaton based query for the frontier.
-        Automaton autn = buildAutomaton(collectorTerms);
-        AutomatonQuery autnQuery = new AutomatonQuery(new Term(matchField.getName()), autn);
-        q = autnQuery;
+    @Override
+    public Query getResultQuery(SchemaField matchField, boolean useAutomaton) {
+      if (collectorTerms == null || collectorTerms.size() == 0) {
+        // return null if there are no terms (edges) to traverse.
+        return null;
       } else {
-        List<BytesRef> termList = new ArrayList<>(collectorTerms.size());
-        for (int i = 0 ; i < collectorTerms.size(); i++) {
-          BytesRef ref = new BytesRef();
-          collectorTerms.get(i, ref);
-          termList.add(ref);
+        // Create a query
+        Query q = null;
+
+        // TODO: see if we should dynamically select this based on the frontier size.
+        if (useAutomaton) {
+          // build an automaton based query for the frontier.
+          Automaton autn = buildAutomaton(collectorTerms);
+          AutomatonQuery autnQuery = new AutomatonQuery(new Term(matchField.getName()), autn);
+          q = autnQuery;
+        } else {
+          List<BytesRef> termList = new ArrayList<>(collectorTerms.size());
+          for (int i = 0; i < collectorTerms.size(); i++) {
+            BytesRef ref = new BytesRef();
+            collectorTerms.get(i, ref);
+            termList.add(ref);
+          }
+          q = (matchField.hasDocValues() && !matchField.indexed())
+                  ? new DocValuesTermsQuery(matchField.getName(), termList)
+                  : new TermInSetQuery(matchField.getName(), termList);
         }
-        q = (matchField.hasDocValues() && !matchField.indexed())
-            ? new DocValuesTermsQuery(matchField.getName(), termList)
-            : new TermInSetQuery(matchField.getName(), termList);
-      }
 
-      return q;
+        return q;
+      }
     }
-  }
 
 
-  /** Build an automaton to represent the frontier query */
-  private Automaton buildAutomaton(BytesRefHash termBytesHash) {
-    // need top pass a sorted set of terms to the autn builder (maybe a better way to avoid this?)
-    final TreeSet<BytesRef> terms = new TreeSet<BytesRef>();
-    for (int i = 0 ; i < termBytesHash.size(); i++) {
-      BytesRef ref = new BytesRef();
-      termBytesHash.get(i, ref);
-      terms.add(ref);
+    /**
+     * Build an automaton to represent the frontier query
+     */
+    private Automaton buildAutomaton(BytesRefHash termBytesHash) {
+      // need to pass a sorted set of terms to the autn builder (maybe a better way to avoid this?)
+      final TreeSet<BytesRef> terms = new TreeSet<BytesRef>();
+      for (int i = 0; i < termBytesHash.size(); i++) {
+        BytesRef ref = new BytesRef();
+        termBytesHash.get(i, ref);
+        terms.add(ref);
+      }
+      final Automaton a = DaciukMihovAutomatonBuilder.build(terms);
+      return a;
     }
-    final Automaton a = DaciukMihovAutomatonBuilder.build(terms);
-    return a;
+
   }
 }
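
A standalone sketch (not part of the patch above; the field name and edge ids are made up) of the
automaton branch of GraphTermsCollector.getResultQuery: the collected edge terms are handed, in
sorted order, to DaciukMihovAutomatonBuilder and wrapped in an AutomatonQuery. The
TermInSetQuery/DocValuesTermsQuery fallback from the patch is omitted here.

    import java.util.TreeSet;

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.AutomatonQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util.automaton.Automaton;
    import org.apache.lucene.util.automaton.DaciukMihovAutomatonBuilder;

    public class FrontierQuerySketch {
      // Builds a query matching any of the given edge ids in matchField.
      static Query frontierQuery(String matchField, TreeSet<BytesRef> sortedEdgeIds) {
        // DaciukMihovAutomatonBuilder expects its input in sorted (UTF-8) order, hence the TreeSet.
        Automaton automaton = DaciukMihovAutomatonBuilder.build(sortedEdgeIds);
        return new AutomatonQuery(new Term(matchField), automaton);
      }

      public static void main(String[] args) {
        TreeSet<BytesRef> edgeIds = new TreeSet<>();
        edgeIds.add(new BytesRef("node-1"));
        edgeIds.add(new BytesRef("node-2"));
        System.out.println(frontierQuery("edge_ids", edgeIds));  // an automaton query over edge_ids
      }
    }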
 
diff --git a/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java b/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java
index 5bec599..c25679b 100644
--- a/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java
@@ -200,7 +200,7 @@ public class GraphQuery extends Query {
           // Create the graph result collector for this level
           GraphEdgeCollector graphResultCollector = collectSchemaField.getType().isPointField()
               ? new GraphPointsCollector(collectSchemaField, new BitDocSet(resultBits), leafNodes)
-              : new GraphTermsCollector(collectSchemaField, new BitDocSet(resultBits), leafNodes);
+              : new GraphEdgeCollector.GraphTermsCollector(collectSchemaField, new BitDocSet(resultBits), leafNodes);
 
           fromSet = new BitDocSet(new FixedBitSet(capacity));
           graphResultCollector.setCollectDocs(fromSet.getBits());
diff --git a/solr/core/src/java/org/apache/solr/update/TransactionLog.java b/solr/core/src/java/org/apache/solr/update/TransactionLog.java
index 9fd2f2f..e9ced2e 100644
--- a/solr/core/src/java/org/apache/solr/update/TransactionLog.java
+++ b/solr/core/src/java/org/apache/solr/update/TransactionLog.java
@@ -69,7 +69,7 @@ public class TransactionLog implements Closeable {
   private boolean debug = log.isDebugEnabled();
   private boolean trace = log.isTraceEnabled();
 
-  public final static String END_MESSAGE="SOLR_TLOG_END";
+  public final static String END_MESSAGE = "SOLR_TLOG_END";
 
   long id;
   File tlogFile;
@@ -83,7 +83,7 @@ public class TransactionLog implements Closeable {
   protected volatile boolean deleteOnClose = true;  // we can delete old tlogs since they are currently only used for real-time-get (and in the future, recovery)
 
   AtomicInteger refcount = new AtomicInteger(1);
-  Map<String,Integer> globalStringMap = new HashMap<>();
+  Map<String, Integer> globalStringMap = new HashMap<>();
   List<String> globalStringList = new ArrayList<>();
 
   // write a BytesRef as a byte array
@@ -91,13 +91,13 @@ public class TransactionLog implements Closeable {
     @Override
     public Object resolve(Object o, JavaBinCodec codec) throws IOException {
       if (o instanceof BytesRef) {
-        BytesRef br = (BytesRef)o;
+        BytesRef br = (BytesRef) o;
         codec.writeByteArray(br.bytes, br.offset, br.length);
         return null;
       }
       // Fallback: we have no idea how to serialize this.  Be noisy to prevent insidious bugs
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-          "TransactionLog doesn't know how to serialize " + o.getClass() + "; try implementing ObjectResolver?");
+              "TransactionLog doesn't know how to serialize " + o.getClass() + "; try implementing ObjectResolver?");
     }
   };
 
@@ -167,12 +167,12 @@ public class TransactionLog implements Closeable {
     try {
       if (debug) {
         log.debug("New TransactionLog file= {}, exists={}, size={} openExisting={}"
-            , tlogFile, tlogFile.exists(), tlogFile.length(), openExisting);
+                , tlogFile, tlogFile.exists(), tlogFile.length(), openExisting);
       }
 
       // Parse tlog id from the filename
       String filename = tlogFile.getName();
-      id = Long.parseLong(filename.substring(filename.lastIndexOf('.')+1));
+      id = Long.parseLong(filename.substring(filename.lastIndexOf('.') + 1));
 
       this.tlogFile = tlogFile;
       raf = new RandomAccessFile(this.tlogFile, "rw");
@@ -197,7 +197,7 @@ public class TransactionLog implements Closeable {
           log.warn("New transaction log already exists:{} size={}", tlogFile, raf.length());
           return;
         }
-       
+
         if (start > 0) {
           raf.setLength(0);
         }
@@ -205,7 +205,7 @@ public class TransactionLog implements Closeable {
       }
 
       success = true;
-      
+
       assert ObjectReleaseTracker.track(this);
 
     } catch (IOException e) {
@@ -222,7 +222,8 @@ public class TransactionLog implements Closeable {
   }
 
   // for subclasses
-  protected TransactionLog() {}
+  protected TransactionLog() {
+  }
 
   /** Returns the number of records in the log (currently includes the header and an optional commit).
    * Note: currently returns 0 for reopened existing log files.
@@ -241,12 +242,12 @@ public class TransactionLog implements Closeable {
     }
 
     // the end of the file should have the end message (added during a commit) plus a 4 byte size
-    byte[] buf = new byte[ END_MESSAGE.length() ];
+    byte[] buf = new byte[END_MESSAGE.length()];
     long pos = size - END_MESSAGE.length() - 4;
     if (pos < 0) return false;
     @SuppressWarnings("resource") final ChannelFastInputStream is = new ChannelFastInputStream(channel, pos);
     is.read(buf);
-    for (int i=0; i<buf.length; i++) {
+    for (int i = 0; i < buf.length; i++) {
       if (buf[i] != END_MESSAGE.charAt(i)) return false;
     }
     return true;
@@ -269,17 +270,17 @@ public class TransactionLog implements Closeable {
     // read existing header
     fis = fis != null ? fis : new ChannelFastInputStream(channel, 0);
     @SuppressWarnings("resource") final LogCodec codec = new LogCodec(resolver);
-    Map header = (Map)codec.unmarshal(fis);
+    Map header = (Map) codec.unmarshal(fis);
 
     fis.readInt(); // skip size
 
     // needed to read other records
 
     synchronized (this) {
-      globalStringList = (List<String>)header.get("strings");
+      globalStringList = (List<String>) header.get("strings");
       globalStringMap = new HashMap<>(globalStringList.size());
-      for (int i=0; i<globalStringList.size(); i++) {
-        globalStringMap.put( globalStringList.get(i), i+1);
+      for (int i = 0; i < globalStringList.size(); i++) {
+        globalStringMap.put(globalStringList.get(i), i + 1);
       }
     }
   }
@@ -309,16 +310,16 @@ public class TransactionLog implements Closeable {
     long pos = fos.size();
     assert pos == 0;
 
-    Map header = new LinkedHashMap<String,Object>();
-    header.put("SOLR_TLOG",1); // a magic string + version number
-    header.put("strings",globalStringList);
+    Map header = new LinkedHashMap<String, Object>();
+    header.put("SOLR_TLOG", 1); // a magic string + version number
+    header.put("strings", globalStringList);
     codec.marshal(header, fos);
 
     endRecord(pos);
   }
 
   protected void endRecord(long startRecordPosition) throws IOException {
-    fos.writeInt((int)(fos.size() - startRecordPosition));
+    fos.writeInt((int) (fos.size() - startRecordPosition));
     numRecords++;
   }
 
@@ -347,7 +348,7 @@ public class TransactionLog implements Closeable {
    * the command to the transaction log.)
    * @param cmd The add update command to be written
    * @return Returns the position pointer of the written update command
-   * 
+   *
    * @see #write(AddUpdateCommand, long)
    */
   public long write(AddUpdateCommand cmd) {
@@ -357,14 +358,14 @@ public class TransactionLog implements Closeable {
   /**
    * Writes an add update command to the transaction log. This should be called only for
    * writing in-place updates, or else pass -1 as the prevPointer.
-   * @param cmd The add update command to be written
-   * @param prevPointer The pointer in the transaction log which this update depends 
-   * on (applicable for in-place updates)
+   * @param cmd         The add update command to be written
+   * @param prevPointer The pointer in the transaction log which this update depends
+   *                    on (applicable for in-place updates)
    * @return Returns the position pointer of the written update command
    */
   public long write(AddUpdateCommand cmd, long prevPointer) {
     assert (-1 <= prevPointer && (cmd.isInPlaceUpdate() || (-1 == prevPointer)));
-    
+
     LogCodec codec = new LogCodec(resolver);
     SolrInputDocument sdoc = cmd.getSolrInputDocument();
 
@@ -374,7 +375,7 @@ public class TransactionLog implements Closeable {
       // adaptive buffer sizing
       int bufSize = lastAddSize;    // unsynchronized access of lastAddSize should be fine
       // at least 256 bytes and at most 1 MB
-      bufSize = Math.min(1024*1024, Math.max(256, bufSize+(bufSize>>3)+256));
+      bufSize = Math.min(1024 * 1024, Math.max(256, bufSize + (bufSize >> 3) + 256));
 
       MemOutputStream out = new MemOutputStream(new byte[bufSize]);
       codec.init(out);
@@ -391,7 +392,7 @@ public class TransactionLog implements Closeable {
         codec.writeLong(cmd.getVersion());
         codec.writeSolrInputDocument(cmd.getSolrInputDocument());
       }
-      lastAddSize = (int)out.size();
+      lastAddSize = (int) out.size();
 
       synchronized (this) {
         long pos = fos.size();   // if we had flushed, this should be equal to channel.position()
@@ -465,9 +466,9 @@ public class TransactionLog implements Closeable {
         // fos.flushBuffer();  // flush later
         return pos;
       }
-      } catch (IOException e) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
-      }
+    } catch (IOException e) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+    }
 
   }
 
@@ -515,10 +516,10 @@ public class TransactionLog implements Closeable {
         fos.flushBuffer();
         /***
          System.out.println("###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos);
-        if (fos.size() != raf.length() || pos >= fos.size() ) {
-          throw new RuntimeException("ERROR" + "###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos);
-        }
-        ***/
+         if (fos.size() != raf.length() || pos >= fos.size() ) {
+         throw new RuntimeException("ERROR" + "###flushBuffer to " + fos.size() + " raf.length()=" + raf.length() + " pos="+pos);
+         }
+         ***/
       }
 
       ChannelFastInputStream fis = new ChannelFastInputStream(channel, pos);
@@ -633,7 +634,8 @@ public class TransactionLog implements Closeable {
 
   /** Returns a reader that can be used while a log is still in use.
    * Currently only *one* LogReader may be outstanding, and that log may only
-   * be used from a single thread. */
+   * be used from a single thread.
+   */
   public LogReader getReader(long startingPos) {
     return new LogReader(startingPos);
   }
@@ -744,7 +746,7 @@ public class TransactionLog implements Closeable {
         long pos = startingPos;
 
         long lastVersion = Long.MIN_VALUE;
-        while ( (o = super.next()) != null) {
+        while ((o = super.next()) != null) {
           List entry = (List) o;
           long version = (Long) entry.get(UpdateLog.VERSION_IDX);
           version = Math.abs(version);
@@ -780,10 +782,11 @@ public class TransactionLog implements Closeable {
 
     /* returns the position in the log file of the last record returned by next() */
     public abstract long position();
+
     public abstract void close();
 
     @Override
-    public abstract String toString() ;
+    public abstract String toString();
 
   }
 
@@ -812,7 +815,7 @@ public class TransactionLog implements Closeable {
       }
 
       fis = new ChannelFastInputStream(channel, 0);
-      if (sz >=4) {
+      if (sz >= 4) {
         // readHeader(fis);  // should not be needed
         prevPos = sz - 4;
         fis.seek(prevPos);
@@ -843,7 +846,7 @@ public class TransactionLog implements Closeable {
       } else {
         // Position buffer so that this record is at the end.
         // For small records, this will cause subsequent calls to next() to be within the buffer.
-        long seekPos =  endOfThisRecord - fis.getBufferSize();
+        long seekPos = endOfThisRecord - fis.getBufferSize();
         seekPos = Math.min(seekPos, prevPos); // seek to the start of the record if it's larger then the block size.
         seekPos = Math.max(seekPos, 0);
         fis.seek(seekPos);
@@ -880,57 +883,54 @@ public class TransactionLog implements Closeable {
 
   }
 
-}
-
-
+  static class ChannelFastInputStream extends FastInputStream {
+    private FileChannel ch;
 
-class ChannelFastInputStream extends FastInputStream {
-  private FileChannel ch;
-
-  public ChannelFastInputStream(FileChannel ch, long chPosition) {
-    // super(null, new byte[10],0,0);    // a small buffer size for testing purposes
-    super(null);
-    this.ch = ch;
-    super.readFromStream = chPosition;
-  }
+    public ChannelFastInputStream(FileChannel ch, long chPosition) {
+      // super(null, new byte[10],0,0);    // a small buffer size for testing purposes
+      super(null);
+      this.ch = ch;
+      super.readFromStream = chPosition;
+    }
 
-  @Override
-  public int readWrappedStream(byte[] target, int offset, int len) throws IOException {
-    ByteBuffer bb = ByteBuffer.wrap(target, offset, len);
-    int ret = ch.read(bb, readFromStream);
-    return ret;
-  }
+    @Override
+    public int readWrappedStream(byte[] target, int offset, int len) throws IOException {
+      ByteBuffer bb = ByteBuffer.wrap(target, offset, len);
+      int ret = ch.read(bb, readFromStream);
+      return ret;
+    }
 
-  public void seek(long position) throws IOException {
-    if (position <= readFromStream && position >= getBufferPos()) {
-      // seek within buffer
-      pos = (int)(position - getBufferPos());
-    } else {
-      // long currSize = ch.size();   // not needed - underlying read should handle (unless read never done)
-      // if (position > currSize) throw new EOFException("Read past EOF: seeking to " + position + " on file of size " + currSize + " file=" + ch);
-      readFromStream = position;
-      end = pos = 0;
+    public void seek(long position) throws IOException {
+      if (position <= readFromStream && position >= getBufferPos()) {
+        // seek within buffer
+        pos = (int) (position - getBufferPos());
+      } else {
+        // long currSize = ch.size();   // not needed - underlying read should handle (unless read never done)
+        // if (position > currSize) throw new EOFException("Read past EOF: seeking to " + position + " on file of size " + currSize + " file=" + ch);
+        readFromStream = position;
+        end = pos = 0;
+      }
+      assert position() == position;
     }
-    assert position() == position;
-  }
 
   /** where is the start of the buffer relative to the whole file */
-  public long getBufferPos() {
-    return readFromStream - end;
-  }
+    public long getBufferPos() {
+      return readFromStream - end;
+    }
 
-  public int getBufferSize() {
-    return buf.length;
-  }
+    public int getBufferSize() {
+      return buf.length;
+    }
 
-  @Override
-  public void close() throws IOException {
-    ch.close();
-  }
+    @Override
+    public void close() throws IOException {
+      ch.close();
+    }
 
-  @Override
-  public String toString() {
-    return "readFromStream="+readFromStream +" pos="+pos +" end="+end + " bufferPos="+getBufferPos() + " position="+position() ;
+    @Override
+    public String toString() {
+      return "readFromStream=" + readFromStream + " pos=" + pos + " end=" + end + " bufferPos=" + getBufferPos() + " position=" + position();
+    }
   }
 }
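
A tiny standalone sketch (not part of the patch above; the input values are illustrative) of the
adaptive buffer sizing used in TransactionLog.write: grow the last observed serialized-add size by
roughly 12.5% plus 256 bytes, clamped to at least 256 bytes and at most 1 MB.

    public class BufSizeSketch {
      // Same formula as the patch: min(1 MB, max(256, last + last/8 + 256)).
      static int nextBufSize(int lastAddSize) {
        return Math.min(1024 * 1024, Math.max(256, lastAddSize + (lastAddSize >> 3) + 256));
      }

      public static void main(String[] args) {
        System.out.println(nextBufSize(0));          // 256
        System.out.println(nextBufSize(4096));       // 4096 + 512 + 256 = 4864
        System.out.println(nextBufSize(2_000_000));  // clamped to 1048576 (1 MB)
      }
    }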
 
diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java
index 158900d..8da2df7 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedZkUpdateProcessor.java
@@ -131,7 +131,7 @@ public class DistributedZkUpdateProcessor extends DistributedUpdateProcessor {
     while (nextInChain != null)  {
       Class<? extends UpdateRequestProcessor> klass = nextInChain.getClass();
       if (klass != LogUpdateProcessorFactory.LogUpdateProcessor.class
-          && klass != RunUpdateProcessor.class
+          && klass != RunUpdateProcessorFactory.RunUpdateProcessor.class
           && klass != TolerantUpdateProcessor.class)  {
         shouldClone = true;
         break;
diff --git a/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java
index d49ab27..a208d41 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/RunUpdateProcessorFactory.java
@@ -33,14 +33,12 @@ import org.apache.solr.update.*;
  * @since solr 1.3
  * @see DistributingUpdateProcessorFactory
  */
-public class RunUpdateProcessorFactory extends UpdateRequestProcessorFactory 
-{
+public class RunUpdateProcessorFactory extends UpdateRequestProcessorFactory {
 
   public static final String PRE_RUN_CHAIN_NAME = "_preRun_";
 
   @Override
-  public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) 
-  {
+  public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
     RunUpdateProcessor runUpdateProcessor = new RunUpdateProcessor(req, next);
     UpdateRequestProcessorChain preRun = req.getCore().getUpdateProcessingChain(PRE_RUN_CHAIN_NAME);
     if (preRun != null) {
@@ -49,82 +47,79 @@ public class RunUpdateProcessorFactory extends UpdateRequestProcessorFactory
       return runUpdateProcessor;
     }
   }
-}
 
-class RunUpdateProcessor extends UpdateRequestProcessor 
-{
-  private final SolrQueryRequest req;
-  private final UpdateHandler updateHandler;
 
-  private boolean changesSinceCommit = false;
+  static class RunUpdateProcessor extends UpdateRequestProcessor {
+    private final SolrQueryRequest req;
+    private final UpdateHandler updateHandler;
 
-  public RunUpdateProcessor(SolrQueryRequest req, UpdateRequestProcessor next) {
-    super( next );
-    this.req = req;
-    this.updateHandler = req.getCore().getUpdateHandler();
-  }
+    private boolean changesSinceCommit = false;
 
-  @Override
-  public void processAdd(AddUpdateCommand cmd) throws IOException {
-    
-    if (AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) {
-      throw new SolrException
-        (SolrException.ErrorCode.BAD_REQUEST,
-         "RunUpdateProcessor has received an AddUpdateCommand containing a document that appears to still contain Atomic document update operations, most likely because DistributedUpdateProcessorFactory was explicitly disabled from this updateRequestProcessorChain");
+    public RunUpdateProcessor(SolrQueryRequest req, UpdateRequestProcessor next) {
+      super(next);
+      this.req = req;
+      this.updateHandler = req.getCore().getUpdateHandler();
     }
 
-    updateHandler.addDoc(cmd);
-    super.processAdd(cmd);
-    changesSinceCommit = true;
-  }
+    @Override
+    public void processAdd(AddUpdateCommand cmd) throws IOException {
 
-  @Override
-  public void processDelete(DeleteUpdateCommand cmd) throws IOException {
-    if( cmd.isDeleteById()) {
-      updateHandler.delete(cmd);
+      if (AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)) {
+        throw new SolrException
+                (SolrException.ErrorCode.BAD_REQUEST,
+                        "RunUpdateProcessor has received an AddUpdateCommand containing a document that appears to still contain Atomic document update operations, most likely because DistributedUpdateProcessorFactory was explicitly disabled from this updateRequestProcessorChain");
+      }
+
+      updateHandler.addDoc(cmd);
+      super.processAdd(cmd);
+      changesSinceCommit = true;
     }
-    else {
-      updateHandler.deleteByQuery(cmd);
+
+    @Override
+    public void processDelete(DeleteUpdateCommand cmd) throws IOException {
+      if (cmd.isDeleteById()) {
+        updateHandler.delete(cmd);
+      } else {
+        updateHandler.deleteByQuery(cmd);
+      }
+      super.processDelete(cmd);
+      changesSinceCommit = true;
     }
-    super.processDelete(cmd);
-    changesSinceCommit = true;
-  }
 
-  @Override
-  public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException {
-    updateHandler.mergeIndexes(cmd);
-    super.processMergeIndexes(cmd);
-  }
+    @Override
+    public void processMergeIndexes(MergeIndexesCommand cmd) throws IOException {
+      updateHandler.mergeIndexes(cmd);
+      super.processMergeIndexes(cmd);
+    }
 
-  @Override
-  public void processCommit(CommitUpdateCommand cmd) throws IOException
-  {
-    updateHandler.commit(cmd);
-    super.processCommit(cmd);
-    if (!cmd.softCommit) {
-      // a hard commit means we don't need to flush the transaction log
-      changesSinceCommit = false;
+    @Override
+    public void processCommit(CommitUpdateCommand cmd) throws IOException {
+      updateHandler.commit(cmd);
+      super.processCommit(cmd);
+      if (!cmd.softCommit) {
+        // a hard commit means we don't need to flush the transaction log
+        changesSinceCommit = false;
+      }
     }
-  }
 
-  /**
-   * @since Solr 1.4
-   */
-  @Override
-  public void processRollback(RollbackUpdateCommand cmd) throws IOException
-  {
-    updateHandler.rollback(cmd);
-    super.processRollback(cmd);
-    changesSinceCommit = false;
-  }
+    /**
+     * @since Solr 1.4
+     */
+    @Override
+    public void processRollback(RollbackUpdateCommand cmd) throws IOException {
+      updateHandler.rollback(cmd);
+      super.processRollback(cmd);
+      changesSinceCommit = false;
+    }
 
 
-  @Override
-  public void finish() throws IOException {
-    if (changesSinceCommit && updateHandler.getUpdateLog() != null) {
-      updateHandler.getUpdateLog().finish(null);
+    @Override
+    public void finish() throws IOException {
+      if (changesSinceCommit && updateHandler.getUpdateLog() != null) {
+        updateHandler.getUpdateLog().finish(null);
+      }
+      super.finish();
     }
-    super.finish();
   }
 }
 
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
index 2a5dcd1..522b22c 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
@@ -136,7 +136,7 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
     try {
       int nShards = responsesAndTests.length / 2;
       Object jsonFacet = Utils.fromJSONString(facet);
-      FacetParser parser = new FacetRequest.FacetTopParser(req);
+      FacetParser parser = new FacetParser.FacetTopParser(req);
       FacetRequest facetRequest = parser.parse(jsonFacet);
 
       FacetMerger merger = null;
diff --git a/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java
index 66d612f..cbd6920 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/UpdateRequestProcessorFactoryTest.java
@@ -162,7 +162,7 @@ public class UpdateRequestProcessorFactoryTest extends SolrTestCaseJ4 {
 
       // for these 3 (distrib) chains, the last proc should always be RunUpdateProcessor
       assertTrue(name + " (distrib) last processor isn't a RunUpdateProcessor: " + procs.toString(),
-                 procs.get(procs.size()-1) instanceof RunUpdateProcessor );
+                 procs.get(procs.size()-1) instanceof RunUpdateProcessorFactory.RunUpdateProcessor );
 
       // either 1 proc was droped in distrib mode, or 1 for the "implicit" chain
 
diff --git a/solr/solrj/src/java/org/noggit/CharArr.java b/solr/solrj/src/java/org/noggit/CharArr.java
index 9ecc8e6..0431e10 100644
--- a/solr/solrj/src/java/org/noggit/CharArr.java
+++ b/solr/solrj/src/java/org/noggit/CharArr.java
@@ -225,170 +225,170 @@ public class CharArr implements CharSequence, Appendable {
     write(c);
     return this;
   }
-}
 
 
-class NullCharArr extends CharArr {
-  public NullCharArr() {
-    super(new char[1], 0, 0);
-  }
+  static class NullCharArr extends CharArr {
+    public NullCharArr() {
+      super(new char[1], 0, 0);
+    }
 
-  @Override
-  public void unsafeWrite(char b) {
-  }
+    @Override
+    public void unsafeWrite(char b) {
+    }
 
-  @Override
-  public void unsafeWrite(char b[], int off, int len) {
-  }
+    @Override
+    public void unsafeWrite(char b[], int off, int len) {
+    }
 
-  @Override
-  public void unsafeWrite(int b) {
-  }
+    @Override
+    public void unsafeWrite(int b) {
+    }
 
-  @Override
-  public void write(char b) {
-  }
+    @Override
+    public void write(char b) {
+    }
 
-  @Override
-  public void write(char b[], int off, int len) {
-  }
+    @Override
+    public void write(char b[], int off, int len) {
+    }
 
-  @Override
-  public void reserve(int num) {
-  }
+    @Override
+    public void reserve(int num) {
+    }
 
-  @Override
-  protected void resize(int len) {
-  }
+    @Override
+    protected void resize(int len) {
+    }
 
-  @Override
-  public Appendable append(CharSequence csq, int start, int end) throws IOException {
-    return this;
-  }
+    @Override
+    public Appendable append(CharSequence csq, int start, int end) throws IOException {
+      return this;
+    }
 
-  @Override
-  public char charAt(int index) {
-    return 0;
-  }
+    @Override
+    public char charAt(int index) {
+      return 0;
+    }
 
-  @Override
-  public void write(String s, int stringOffset, int len) {
+    @Override
+    public void write(String s, int stringOffset, int len) {
+    }
   }
-}
 
 
-// IDEA: a subclass that refills the array from a reader?
-class CharArrReader extends CharArr {
-  protected final Reader in;
+  // IDEA: a subclass that refills the array from a reader?
+  class CharArrReader extends CharArr {
+    protected final Reader in;
 
-  public CharArrReader(Reader in, int size) {
-    super(size);
-    this.in = in;
-  }
+    public CharArrReader(Reader in, int size) {
+      super(size);
+      this.in = in;
+    }
 
-  @Override
-  public int read() throws IOException {
-    if (start >= end) fill();
-    return start >= end ? -1 : buf[start++];
-  }
+    @Override
+    public int read() throws IOException {
+      if (start >= end) fill();
+      return start >= end ? -1 : buf[start++];
+    }
 
-  @Override
-  public int read(CharBuffer cb) throws IOException {
-    // empty the buffer and then read direct
-    int sz = size();
-    if (sz > 0) cb.put(buf, start, end);
-    int sz2 = in.read(cb);
-    if (sz2 >= 0) return sz + sz2;
-    return sz > 0 ? sz : -1;
-  }
+    @Override
+    public int read(CharBuffer cb) throws IOException {
+      // empty the buffer and then read direct
+      int sz = size();
+      if (sz > 0) cb.put(buf, start, end);
+      int sz2 = in.read(cb);
+      if (sz2 >= 0) return sz + sz2;
+      return sz > 0 ? sz : -1;
+    }
 
-  @Override
-  public int fill() throws IOException {
-    if (start >= end) {
-      reset();
-    } else if (start > 0) {
-      System.arraycopy(buf, start, buf, 0, size());
-      end = size();
-      start = 0;
+    @Override
+    public int fill() throws IOException {
+      if (start >= end) {
+        reset();
+      } else if (start > 0) {
+        System.arraycopy(buf, start, buf, 0, size());
+        end = size();
+        start = 0;
+      }
+      /***
+       // fill fully or not???
+       do {
+       int sz = in.read(buf,end,buf.length-end);
+       if (sz==-1) return;
+       end+=sz;
+       } while (end < buf.length);
+       ***/
+
+      int sz = in.read(buf, end, buf.length - end);
+      if (sz > 0) end += sz;
+      return sz;
     }
-    /***
-     // fill fully or not???
-     do {
-     int sz = in.read(buf,end,buf.length-end);
-     if (sz==-1) return;
-     end+=sz;
-     } while (end < buf.length);
-     ***/
 
-    int sz = in.read(buf, end, buf.length - end);
-    if (sz > 0) end += sz;
-    return sz;
   }
 
-}
-
 
-class CharArrWriter extends CharArr {
-  protected Writer sink;
+  class CharArrWriter extends CharArr {
+    protected Writer sink;
 
-  @Override
-  public void flush() {
-    try {
-      sink.write(buf, start, end - start);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-    start = end = 0;
-  }
-
-  @Override
-  public void write(char b) {
-    if (end >= buf.length) {
-      flush();
-    }
-    unsafeWrite(b);
-  }
-
-  @Override
-  public void write(char b[], int off, int len) {
-    int space = buf.length - end;
-    if (len < space) {
-      unsafeWrite(b, off, len);
-    } else if (len < buf.length) {
-      unsafeWrite(b, off, space);
-      flush();
-      unsafeWrite(b, off + space, len - space);
-    } else {
-      flush();
+    @Override
+    public void flush() {
       try {
-        sink.write(b, off, len);
+        sink.write(buf, start, end - start);
       } catch (IOException e) {
         throw new RuntimeException(e);
       }
+      start = end = 0;
     }
-  }
 
-  @Override
-  public void write(String s, int stringOffset, int len) {
-    int space = buf.length - end;
-    if (len < space) {
-      s.getChars(stringOffset, stringOffset + len, buf, end);
-      end += len;
-    } else if (len < buf.length) {
-      // if the data to write is small enough, buffer it.
-      s.getChars(stringOffset, stringOffset + space, buf, end);
-      flush();
-      s.getChars(stringOffset + space, stringOffset + len, buf, 0);
-      end = len - space;
-    } else {
-      flush();
-      // don't buffer, just write to sink
-      try {
-        sink.write(s, stringOffset, len);
-      } catch (IOException e) {
-        throw new RuntimeException(e);
+    @Override
+    public void write(char b) {
+      if (end >= buf.length) {
+        flush();
       }
+      unsafeWrite(b);
+    }
 
+    @Override
+    public void write(char b[], int off, int len) {
+      int space = buf.length - end;
+      if (len < space) {
+        unsafeWrite(b, off, len);
+      } else if (len < buf.length) {
+        unsafeWrite(b, off, space);
+        flush();
+        unsafeWrite(b, off + space, len - space);
+      } else {
+        flush();
+        try {
+          sink.write(b, off, len);
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+      }
+    }
+
+    @Override
+    public void write(String s, int stringOffset, int len) {
+      int space = buf.length - end;
+      if (len < space) {
+        s.getChars(stringOffset, stringOffset + len, buf, end);
+        end += len;
+      } else if (len < buf.length) {
+        // if the data to write is small enough, buffer it.
+        s.getChars(stringOffset, stringOffset + space, buf, end);
+        flush();
+        s.getChars(stringOffset + space, stringOffset + len, buf, 0);
+        end = len - space;
+      } else {
+        flush();
+        // don't buffer, just write to sink
+        try {
+          sink.write(s, stringOffset, len);
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+
+      }
     }
   }
 }
diff --git a/solr/solrj/src/java/org/noggit/JSONParser.java b/solr/solrj/src/java/org/noggit/JSONParser.java
index 8b1ac01..d1655d1 100644
--- a/solr/solrj/src/java/org/noggit/JSONParser.java
+++ b/solr/solrj/src/java/org/noggit/JSONParser.java
@@ -132,7 +132,7 @@ public class JSONParser {
     return "Unknown: " + e;
   }
 
-  private static final CharArr devNull = new NullCharArr();
+  private static final CharArr devNull = new CharArr.NullCharArr();
 
   protected int flags = FLAGS_DEFAULT;
 


[lucene-solr] 40/47: LUCENE-9359: Address test failures when the codec version gets modified.

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 793c1a1c8899fc503770009a3a65caf04d6d1872
Author: Adrien Grand <jp...@gmail.com>
AuthorDate: Fri May 29 21:16:10 2020 +0200

    LUCENE-9359: Address test failures when the codec version gets modified.
---
 lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
index 23c98ad..1d6a4f4 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
@@ -31,6 +31,7 @@ import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -235,7 +236,9 @@ public class TestSegmentInfos extends LuceneTestCase {
     }
     assertTrue("No segments file found", corrupt);
 
-    expectThrows(CorruptIndexException.class, () -> SegmentInfos.readLatestCommit(corruptDir));
+    expectThrowsAnyOf(
+        Arrays.asList(CorruptIndexException.class, IndexFormatTooOldException.class, IndexFormatTooNewException.class),
+        () -> SegmentInfos.readLatestCommit(corruptDir));
     dir.close();
     corruptDir.close();
   }


[lucene-solr] 27/47: Add bugfix version 8.5.2

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 3e00bf9214c7218ec6b38892fac1c69a4db44373
Author: Mike Drob <md...@apple.com>
AuthorDate: Wed May 27 12:09:36 2020 -0500

    Add bugfix version 8.5.2
---
 lucene/CHANGES.txt                                      | 13 +++++++++----
 .../core/src/java/org/apache/lucene/util/Version.java   |  7 +++++++
 solr/CHANGES.txt                                        | 17 ++++++++++++-----
 3 files changed, 28 insertions(+), 9 deletions(-)

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index c8b382f..cd42f6e 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -218,10 +218,6 @@ Optimizations
 * LUCENE-9087: Build always trees with full leaves and lower the default value for maxPointsPerLeafNode to 512.
   (Ignacio Vera)
 
-* LUCENE-9350: Partial reversion of LUCENE-9068; holding levenshtein automata on FuzzyQuery can end
-  up blowing up query caches which use query objects as cache keys, so building the automata is
-  now delayed to search time again.  (Alan Woodward, Mike Drob)
-
 Bug Fixes
 ---------------------
 * LUCENE-9259: Fix wrong NGramFilterFactory argument name for preserveOriginal option (Paul Pazderski)
@@ -294,6 +290,15 @@ Build
 
 * LUCENE-9380: Fix auxiliary class warnings in Lucene (Erick Erickson)
 
+======================= Lucene 8.5.2 =======================
+
+Optimizations
+---------------------
+
+* LUCENE-9350: Partial reversion of LUCENE-9068; holding levenshtein automata on FuzzyQuery can end
+  up blowing up query caches which use query objects as cache keys, so building the automata is
+  now delayed to search time again.  (Alan Woodward, Mike Drob)
+
 ======================= Lucene 8.5.1 =======================
 
 Bug Fixes
diff --git a/lucene/core/src/java/org/apache/lucene/util/Version.java b/lucene/core/src/java/org/apache/lucene/util/Version.java
index 5ed1a95..f5dbcc1 100644
--- a/lucene/core/src/java/org/apache/lucene/util/Version.java
+++ b/lucene/core/src/java/org/apache/lucene/util/Version.java
@@ -103,6 +103,13 @@ public final class Version {
   public static final Version LUCENE_8_5_1 = new Version(8, 5, 1);
 
   /**
+   * Match settings and bugs in Lucene's 8.5.2 release.
+   * @deprecated Use latest
+   */
+  @Deprecated
+  public static final Version LUCENE_8_5_2 = new Version(8, 5, 2);
+
+  /**
    * Match settings and bugs in Lucene's 8.6.0 release.
    * @deprecated Use latest
    */
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index d163d11..aeb7945 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -172,8 +172,6 @@ Bug Fixes
 
 * SOLR-14291: Handle dotted fields in legacy Analytics Component (Anatolii Siuniaev via Mikhail Khludnev)
 
-* SOLR-14411: Fix Admin UI collection/core drop-downs placeholder text. Completes work started in SOLR-14359 (janhoy)
-
 * SOLR-14371: Zk StatusHandler now parse dynamic zk server config if supported, fixing Admin UI Zookeeper Status
   screen in case dynamic reconfig host list differs from static zkHost string (janhoy)
 
@@ -186,9 +184,6 @@ Bug Fixes
 * SOLR-14456: Fix Content-Type header usage when a request is forwarded from Solr node to Solr
   node with compression enabled (samuelgmartinez via Houston Putman)
 
-* SOLR-14471: Fix bug in shards.preference behavior, base replica selection strategy not applied to the last group of
-  equivalent replicas. (Michael Gibney via Tomás Fernández Löbbe)
-
 * SOLR-8394: /admin/luke was always showing 0 for indexHeapUsageBytes. It should work now.
   (Steve Molloy, Isabelle Giguere, David Smiley)
 
@@ -265,6 +260,18 @@ Other Changes
 
 * SOLR-14474: Fix remaining auxilliary class warnings in Solr (Erick Erickson)
 
+==================  8.5.2 ==================
+
+Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
+
+Bug Fixes
+---------------------
+
+* SOLR-14411: Fix Admin UI collection/core drop-downs placeholder text. Completes work started in SOLR-14359 (janhoy)
+
+* SOLR-14471: Fix bug in shards.preference behavior, base replica selection strategy not applied to the last group of
+  equivalent replicas. (Michael Gibney via Tomás Fernández Löbbe)
+
 ==================  8.5.1 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
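
As context for the Version.java change above: per-release constants such as LUCENE_8_5_2 are
typically only consumed for compatibility checks. A minimal sketch follows (not part of the
commit; the main() harness and the literal "8.5.2" string are illustrative assumptions, while
Version.parseLeniently and Version#onOrAfter are existing Lucene APIs):

    import org.apache.lucene.util.Version;

    public class VersionCheckSketch {
      public static void main(String[] args) throws Exception {
        // e.g. a version string read from segment metadata or configuration
        Version indexVersion = Version.parseLeniently("8.5.2");
        // Deprecated per-release constants remain useful for compatibility checks:
        if (indexVersion.onOrAfter(Version.LUCENE_8_5_2)) {
          System.out.println("Written by Lucene 8.5.2 or later: " + indexVersion);
        }
      }
    }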


[lucene-solr] 01/47: SOLR-14473: Improve Overseer Javadoc (#1510)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 3cb8ebbc0b22ab2929a3040da20329426dffb91b
Author: murblanc <43...@users.noreply.github.com>
AuthorDate: Mon May 18 22:21:16 2020 +0200

    SOLR-14473: Improve Overseer Javadoc (#1510)
    
    Co-authored-by: Ilan Ginzburg <ig...@salesforce.com>
---
 .../src/java/org/apache/solr/cloud/Overseer.java   | 64 +++++++++++++++++++++-
 .../org/apache/solr/cloud/ZkDistributedQueue.java  | 11 +++-
 .../solr/common/params/CollectionParams.java       |  9 +++
 3 files changed, 80 insertions(+), 4 deletions(-)

diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index 9df6a2d..dd01368 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -38,6 +38,7 @@ import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.ClusterStateProvider;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.cloud.api.collections.CreateCollectionCmd;
 import org.apache.solr.cloud.api.collections.OverseerCollectionMessageHandler;
 import org.apache.solr.cloud.autoscaling.OverseerTriggerThread;
 import org.apache.solr.cloud.overseer.ClusterStateMutator;
@@ -79,8 +80,61 @@ import org.slf4j.LoggerFactory;
 import com.codahale.metrics.Timer;
 
 /**
- * Cluster leader. Responsible for processing state updates, node assignments, creating/deleting
- * collections, shards, replicas and setting various properties.
+ * <p>Cluster leader. Responsible for processing state updates, node assignments, creating/deleting
+ * collections, shards, replicas and setting various properties.</p>
+ *
+ * <p>The <b>Overseer</b> is a single elected node in the SolrCloud cluster that is in charge of interactions with
+ * ZooKeeper that require global synchronization. It also hosts the Collection API implementation and the
+ * Autoscaling framework.</p>
+ *
+ * <p>The Overseer deals with:</p>
+ * <ul>
+ *   <li>Cluster State updates, i.e. updating Collections' <code>state.json</code> files in ZooKeeper, see {@link ClusterStateUpdater},</li>
+ *   <li>Collection API implementation, including Autoscaling replica placement computation, see
+ *   {@link OverseerCollectionConfigSetProcessor} and {@link OverseerCollectionMessageHandler} (and the example below),</li>
+ *   <li>Updating Config Sets, see {@link OverseerCollectionConfigSetProcessor} and {@link OverseerConfigSetMessageHandler},</li>
+ *   <li>Autoscaling triggers, see {@link org.apache.solr.cloud.autoscaling.OverseerTriggerThread}.</li>
+ * </ul>
+ *
+ * <p>The nodes in the cluster communicate with the Overseer over queues implemented in ZooKeeper. There are essentially
+ * two queues:</p>
+ * <ol>
+ *   <li>The <b>state update queue</b>, through which nodes request the Overseer to update the <code>state.json</code> file of a
+ *   Collection in ZooKeeper. This queue is in Zookeeper at <code>/overseer/queue</code>,</li>
+ *   <li>A queue shared between <b>Collection API and Config Set API</b> requests. This queue is in Zookeeper at
+ *   <code>/overseer/collection-queue-work</code>.</li>
+ * </ol>
+ *
+ * <p>An example of the steps involved in the Overseer processing a Collection creation API call:</p>
+ * <ol>
+ *   <li>Client uses the Collection API with <code>CREATE</code> action and reaches a node of the cluster,</li>
+ *   <li>The node (via {@link CollectionsHandler}) enqueues the request into the <code>/overseer/collection-queue-work</code>
+ *   queue in ZooKeeper,</li>
+ *   <li>The {@link OverseerCollectionConfigSetProcessor} running on the Overseer node dequeues the message and using an
+ *   executor service with a maximum pool size of {@link OverseerTaskProcessor#MAX_PARALLEL_TASKS} hands it for processing
+ *   to {@link OverseerCollectionMessageHandler},</li>
+ *   <li>Command {@link CreateCollectionCmd} then executes and does:
+ *   <ol>
+ *     <li>Update some state directly in ZooKeeper (creating collection znode),</li>
+ *     <li>Compute replica placement on available nodes in the cluster,</li>
+ *     <li>Enqueue a state change request for creating the <code>state.json</code> file for the collection in ZooKeeper.
+ *     This is done by enqueuing a message in <code>/overseer/queue</code>,</li>
+ *     <li>The command then waits for the update to be seen in ZooKeeper...</li>
+ *   </ol></li>
+ *   <li>The {@link ClusterStateUpdater} (also running on the Overseer node) dequeues the state change message and creates the
+ *   <code>state.json</code> file in ZooKeeper for the Collection. All the work of the cluster state updater
+ *   (creations, updates, deletes) is done sequentially for the whole cluster by a single thread.</li>
+ *   <li>The {@link CreateCollectionCmd} sees the state change in
+ *   ZooKeeper and:
+ *   <ol start="5">
+ *     <li>Builds and sends requests to each node to create the appropriate cores for all the replicas of all shards
+ *     of the collection. Nodes create the replicas and set them to {@link org.apache.solr.common.cloud.Replica.State#ACTIVE}.</li>
+ *   </ol></li>
+ *   <li>The collection creation command has succeeded from the Overseer perspective,</li>
+ *   <li>{@link CollectionsHandler} checks the replicas in Zookeeper and verifies they are all
+ *   {@link org.apache.solr.common.cloud.Replica.State#ACTIVE},</li>
+ *   <li>The client receives a success return.</li>
+ * </ol>
  */
 public class Overseer implements SolrCloseable {
   public static final String QUEUE_OPERATION = "operation";
@@ -97,6 +151,12 @@ public class Overseer implements SolrCloseable {
 
   enum LeaderStatus {DONT_KNOW, NO, YES}
 
+  /**
+   * <p>This class is responsible for dequeueing state change requests from the ZooKeeper queue at <code>/overseer/queue</code>
+   * and executing the requested cluster change (essentially writing or updating <code>state.json</code> for a collection).</p>
+   *
+   * <p>The cluster state updater is a single thread dequeueing and executing requests.</p>
+   */
   private class ClusterStateUpdater implements Runnable, Closeable {
 
     private final ZkStateReader reader;
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java b/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java
index 465888f..53d799b 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java
@@ -51,9 +51,16 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * A ZK-based distributed queue. Optimized for single-consumer,
+ * <p>A ZK-based distributed queue. Optimized for single-consumer,
  * multiple-producer: if there are multiple consumers on the same ZK queue,
- * the results should be correct but inefficient
+ * the results should be correct but inefficient.</p>
+ *
+ * <p>This implementation (with help from subclass {@link OverseerTaskQueue}) is used for the
+ * <code>/overseer/collection-queue-work</code> queue used for Collection and Config Set API calls to the Overseer.</p>
+ *
+ * <p><i>Implementation note:</i> In order to enqueue a message into this queue, a {@link CreateMode#EPHEMERAL_SEQUENTIAL} response node is created
+ * and watched at <code>/overseer/collection-queue-work/qnr-<i>monotonically_increasing_id</i></code>, then a corresponding
+ * {@link CreateMode#PERSISTENT} request node reusing the same id is created at <code>/overseer/collection-queue-work/qn-<i>response_id</i></code>.</p>
  */
 public class ZkDistributedQueue implements DistributedQueue {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
index 89f1600..3e8ee37 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CollectionParams.java
@@ -70,6 +70,15 @@ public interface CollectionParams {
     }
   }
 
+  /**
+   * <p>(Mostly) Collection API actions that can be sent by nodes to the Overseer over the <code>/overseer/collection-queue-work</code>
+   * ZooKeeper queue.</p>
+   *
+   * <p>Some of these actions are also used over the cluster state update queue at <code>/overseer/queue</code> and have a
+   * different (though related) meaning there. These actions are:
+   * {@link #CREATE}, {@link #DELETE}, {@link #CREATESHARD}, {@link #DELETESHARD}, {@link #ADDREPLICA}, {@link #ADDREPLICAPROP},
+   * {@link #DELETEREPLICAPROP}, {@link #BALANCESHARDUNIQUE}, {@link #MODIFYCOLLECTION} and {@link #MIGRATESTATEFORMAT}.</p>
+   */
   enum CollectionAction {
     CREATE(true, LockLevel.COLLECTION),
     DELETE(true, LockLevel.COLLECTION),
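
To make the queue interaction described in the Overseer and ZkDistributedQueue Javadoc above
more concrete, here is a minimal sketch of the enqueue pattern for the
/overseer/collection-queue-work queue: an EPHEMERAL_SEQUENTIAL "qnr-" response node is created
and watched, then a PERSISTENT "qn-" request node reusing the same sequence id carries the
message. This is a simplified illustration, not Solr's actual OverseerTaskQueue code; the
method, variable names and payload handling are assumptions, while the ZooKeeper client calls
(create, exists) and CreateMode values are the real API.

    import org.apache.zookeeper.CreateMode;
    import org.apache.zookeeper.ZooDefs;
    import org.apache.zookeeper.ZooKeeper;

    class CollectionQueueEnqueueSketch {
      static void enqueue(ZooKeeper zk, byte[] message) throws Exception {
        String queue = "/overseer/collection-queue-work";
        // 1. Response node: ephemeral + sequential, so ZooKeeper assigns the increasing id.
        String responsePath = zk.create(queue + "/qnr-", new byte[0],
            ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
        String id = responsePath.substring(responsePath.lastIndexOf('-') + 1);
        // 2. Watch the response node so the caller learns when the Overseer writes a result.
        zk.exists(responsePath, event -> System.out.println("Overseer responded: " + event));
        // 3. Request node: persistent, reusing the same id, carrying the Collection API message.
        zk.create(queue + "/qn-" + id, message, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
      }
    }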


[lucene-solr] 31/47: SOLR-14237: Fix an error on admin UI due to improper variable handling

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 8fc28efa13fe3a6d928abe9e4152ed090cd281d7
Author: Ishan Chattopadhyaya <is...@apache.org>
AuthorDate: Thu May 28 16:55:00 2020 +0530

    SOLR-14237: Fix an error on admin UI due to improper variable handling
---
 solr/webapp/web/js/angular/controllers/index.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/solr/webapp/web/js/angular/controllers/index.js b/solr/webapp/web/js/angular/controllers/index.js
index f8b0747..e931d59 100644
--- a/solr/webapp/web/js/angular/controllers/index.js
+++ b/solr/webapp/web/js/angular/controllers/index.js
@@ -21,7 +21,7 @@ solrAdminApp.controller('IndexController', function($scope, System, Cores, Const
     System.get(function(data) {
       $scope.system = data;
 
-      if (username in data.security) {
+      if ("username" in data.security) {
         // Needed for Kerberos, since this is the only place from where
         // Kerberos username can be obtained.
         sessionStorage.setItem("auth.username", data.security.username);


[lucene-solr] 42/47: SOLR-14519:Fix or suppress warnings in solr/cloud/autoscaling

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 545dcc1801b6ccb81b6443d0a6db620978ff5ab5
Author: Erick Erickson <Er...@gmail.com>
AuthorDate: Sun May 31 15:07:06 2020 -0400

    SOLR-14519:Fix or suppress warnings in solr/cloud/autoscaling
---
 solr/CHANGES.txt                                      |  2 +-
 .../apache/solr/cloud/autoscaling/AutoScaling.java    |  2 ++
 .../solr/cloud/autoscaling/AutoScalingHandler.java    | 10 ++++++++++
 .../solr/cloud/autoscaling/ComputePlanAction.java     |  7 +++++++
 .../solr/cloud/autoscaling/ExecutePlanAction.java     |  2 ++
 .../cloud/autoscaling/InactiveShardPlanAction.java    |  1 +
 .../solr/cloud/autoscaling/IndexSizeTrigger.java      |  2 ++
 .../apache/solr/cloud/autoscaling/MetricTrigger.java  |  1 +
 .../solr/cloud/autoscaling/NodeAddedTrigger.java      |  2 ++
 .../solr/cloud/autoscaling/NodeLostTrigger.java       |  2 ++
 .../solr/cloud/autoscaling/ScheduledTriggers.java     |  3 +++
 .../solr/cloud/autoscaling/SearchRateTrigger.java     |  5 +++++
 .../solr/cloud/autoscaling/SystemLogListener.java     |  2 ++
 .../apache/solr/cloud/autoscaling/TriggerBase.java    |  3 +++
 .../apache/solr/cloud/autoscaling/TriggerEvent.java   |  4 ++++
 .../solr/cloud/autoscaling/TriggerEventQueue.java     |  2 ++
 .../apache/solr/cloud/autoscaling/TriggerUtils.java   |  1 +
 .../solr/cloud/autoscaling/sim/SimCloudManager.java   |  6 ++++++
 .../autoscaling/sim/SimClusterStateProvider.java      | 19 +++++++++++++++++++
 .../cloud/autoscaling/sim/SimDistribStateManager.java |  1 +
 .../autoscaling/sim/SimDistributedQueueFactory.java   |  1 +
 .../cloud/autoscaling/sim/SimNodeStateProvider.java   |  3 +++
 .../solr/cloud/autoscaling/sim/SimScenario.java       |  8 ++++++++
 .../apache/solr/cloud/autoscaling/sim/SimUtils.java   |  1 +
 .../cloud/autoscaling/sim/SnapshotCloudManager.java   |  3 +++
 .../autoscaling/sim/SnapshotClusterStateProvider.java |  2 ++
 .../autoscaling/sim/SnapshotDistribStateManager.java  |  2 ++
 .../autoscaling/sim/SnapshotNodeStateProvider.java    |  3 +++
 28 files changed, 99 insertions(+), 1 deletion(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 28d8552..ca3a950 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -252,7 +252,6 @@ Other Changes
 
 * SOLR-14482: Fix or suppress warnings in solr/search/facet (Erick Erickson)
 
-
 * SOLR-14485: Fix or suppress 11 resource leak warnings in apache/solr/cloud (Andras Salaman via
   Erick Erickson)
 
@@ -262,6 +261,7 @@ Other Changes
 
 * SOLR-14474: Fix remaining auxilliary class warnings in Solr (Erick Erickson)
 
+* SOLR-14519: Fix or suppress warnings in solr/cloud/autoscaling/ (Erick Erickson)
 ==================  8.5.2 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScaling.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScaling.java
index 7b2fee7..1a191ee 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScaling.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScaling.java
@@ -212,6 +212,7 @@ public class AutoScaling {
       "        ]" +
       "    }";
 
+  @SuppressWarnings({"unchecked"})
   public static final Map<String, Object> AUTO_ADD_REPLICAS_TRIGGER_PROPS = (Map) Utils.fromJSONString(AUTO_ADD_REPLICAS_TRIGGER_DSL);
 
   public static final String SCHEDULED_MAINTENANCE_TRIGGER_NAME = ".scheduled_maintenance";
@@ -239,6 +240,7 @@ public class AutoScaling {
           "        ]" +
           "    }";
 
+  @SuppressWarnings({"unchecked"})
   public static final Map<String, Object> SCHEDULED_MAINTENANCE_TRIGGER_PROPS = (Map) Utils.fromJSONString(SCHEDULED_MAINTENANCE_TRIGGER_DSL);
 
 }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java
index 1341a24..23ec075 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java
@@ -120,6 +120,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
   }
 
   @Override
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
     try {
       String httpMethod = (String) req.getContext().get("httpMethod");
@@ -187,11 +188,13 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
   }
 
 
+  @SuppressWarnings({"unchecked"})
   private void handleSuggestions(SolrQueryResponse rsp, AutoScalingConfig autoScalingConf, SolrParams params) {
     rsp.getValues().add("suggestions",
         PolicyHelper.getSuggestions(autoScalingConf, cloudManager, params));
   }
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void processOps(SolrQueryRequest req, SolrQueryResponse rsp, List<CommandOperation> ops)
       throws KeeperException, InterruptedException, IOException {
     while (true) {
@@ -269,11 +272,13 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
     return currentConfig.withProperties(configProps);
   }
 
+  @SuppressWarnings({"unchecked"})
   private void handleDiagnostics(SolrQueryResponse rsp, AutoScalingConfig autoScalingConf) {
     Policy policy = autoScalingConf.getPolicy();
     rsp.getValues().add("diagnostics", PolicyHelper.getDiagnostics(policy, cloudManager));
   }
 
+  @SuppressWarnings({"unchecked"})
   private AutoScalingConfig handleSetClusterPolicy(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op,
                                                    AutoScalingConfig currentConfig) throws KeeperException, InterruptedException, IOException {
     List<Map<String, Object>> clusterPolicy = (List<Map<String, Object>>) op.getCommandData();
@@ -293,6 +298,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
     return currentConfig;
   }
 
+  @SuppressWarnings({"unchecked"})
   private AutoScalingConfig handleSetClusterPreferences(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op,
                                                         AutoScalingConfig currentConfig) throws KeeperException, InterruptedException, IOException {
     List<Map<String, Object>> preferences = (List<Map<String, Object>>) op.getCommandData();
@@ -336,6 +342,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
     return currentConfig;
   }
 
+  @SuppressWarnings({"unchecked"})
   private AutoScalingConfig handleSetPolicies(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op,
                                               AutoScalingConfig currentConfig) throws KeeperException, InterruptedException, IOException {
     Map<String, Object> policiesMap = op.getDataMap();
@@ -361,6 +368,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
     return currentConfig;
   }
 
+  @SuppressWarnings({"unchecked"})
   private AutoScalingConfig handleResumeTrigger(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op,
                                                 AutoScalingConfig currentConfig) throws KeeperException, InterruptedException {
     String triggerName = op.getStr(NAME);
@@ -393,6 +401,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
     return currentConfig;
   }
 
+  @SuppressWarnings({"unchecked"})
   private AutoScalingConfig handleSuspendTrigger(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op,
                                                  AutoScalingConfig currentConfig) throws KeeperException, InterruptedException {
     String triggerName = op.getStr(NAME);
@@ -525,6 +534,7 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission
     return currentConfig;
   }
 
+  @SuppressWarnings({"unchecked"})
   private AutoScalingConfig handleSetTrigger(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op,
                                              AutoScalingConfig currentConfig) throws KeeperException, InterruptedException {
     // we're going to modify the op - use a copy
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
index fad45e0..33bf6b0 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ComputePlanAction.java
@@ -72,6 +72,7 @@ public class ComputePlanAction extends TriggerActionBase {
         collectionsPredicate = whiteListedCollections::contains;
       }
     } else if (value instanceof Map) {
+      @SuppressWarnings({"unchecked"})
       Map<String, String> matchConditions = (Map<String, String>) value;
       collectionsPredicate = collectionName -> {
         try {
@@ -133,6 +134,7 @@ public class ComputePlanAction extends TriggerActionBase {
           if (Thread.currentThread().isInterrupted()) {
             throw new InterruptedException("stopping - thread was interrupted");
           }
+          @SuppressWarnings({"rawtypes"})
           SolrRequest operation = suggester.getSuggestion();
           opCount++;
           // prepare suggester for the next iteration
@@ -163,6 +165,7 @@ public class ComputePlanAction extends TriggerActionBase {
           }
           Map<String, Object> props = context.getProperties();
           props.compute("operations", (k, v) -> {
+            @SuppressWarnings({"unchecked", "rawtypes"})
             List<SolrRequest> operations = (List<SolrRequest>) v;
             if (operations == null) operations = new ArrayList<>();
             operations.add(operation);
@@ -211,6 +214,7 @@ public class ComputePlanAction extends TriggerActionBase {
   }
 
   protected int getRequestedNumOps(TriggerEvent event) {
+    @SuppressWarnings({"unchecked"})
     Collection<TriggerEvent.Op> ops = (Collection<TriggerEvent.Op>) event.getProperty(TriggerEvent.REQUESTED_OPS, Collections.emptyList());
     if (ops.isEmpty()) {
       return -1;
@@ -233,6 +237,7 @@ public class ComputePlanAction extends TriggerActionBase {
       case SEARCHRATE:
       case METRIC:
       case INDEXSIZE:
+        @SuppressWarnings({"unchecked"})
         List<TriggerEvent.Op> ops = (List<TriggerEvent.Op>)event.getProperty(TriggerEvent.REQUESTED_OPS, Collections.emptyList());
         int start = (Integer)event.getProperty(START, 0);
         if (ops.isEmpty() || start >= ops.size()) {
@@ -241,6 +246,7 @@ public class ComputePlanAction extends TriggerActionBase {
         TriggerEvent.Op op = ops.get(start);
         suggester = session.getSuggester(op.getAction());
         if (suggester instanceof UnsupportedSuggester) {
+          @SuppressWarnings({"unchecked"})
           List<TriggerEvent.Op> unsupportedOps = (List<TriggerEvent.Op>)context.getProperties().computeIfAbsent(TriggerEvent.UNSUPPORTED_OPS, k -> new ArrayList<TriggerEvent.Op>());
           unsupportedOps.add(op);
         }
@@ -274,6 +280,7 @@ public class ComputePlanAction extends TriggerActionBase {
         return s;
       case DELETENODE:
         int start = (Integer)event.getProperty(START, 0);
+        @SuppressWarnings({"unchecked"})
         List<String> srcNodes = (List<String>) event.getProperty(NODE_NAMES);
         if (srcNodes.isEmpty() || start >= srcNodes.size()) {
           return NoneSuggester.get(session);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java
index e09b3ae..1dfc3b1 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ExecutePlanAction.java
@@ -76,6 +76,7 @@ public class ExecutePlanAction extends TriggerActionBase {
   }
 
   @Override
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void process(TriggerEvent event, ActionContext context) throws Exception {
     if (log.isDebugEnabled()) {
       log.debug("-- processing event: {} with context properties: {}", event, context.getProperties());
@@ -163,6 +164,7 @@ public class ExecutePlanAction extends TriggerActionBase {
           }
           NamedList<Object> result = response.getResponse();
           context.getProperties().compute("responses", (s, o) -> {
+            @SuppressWarnings({"unchecked"})
             List<NamedList<Object>> responses = (List<NamedList<Object>>) o;
             if (responses == null)  responses = new ArrayList<>(operations.size());
             responses.add(result);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java
index 06c69a4..d3de649 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveShardPlanAction.java
@@ -97,6 +97,7 @@ public class InactiveShardPlanAction extends TriggerActionBase {
             if (log.isDebugEnabled()) {
               log.debug("-- delete inactive {} / {}", coll.getName(), s.getName());
             }
+            @SuppressWarnings({"unchecked", "rawtypes"})
             List<SolrRequest> operations = (List<SolrRequest>)context.getProperties().computeIfAbsent("operations", k -> new ArrayList<>());
             operations.add(CollectionAdminRequest.deleteShard(coll.getName(), s.getName()));
             cleanup.computeIfAbsent(coll.getName(), c -> new ArrayList<>()).add(s.getName());
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
index 327a070..da40366 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
@@ -218,6 +218,7 @@ public class IndexSizeTrigger extends TriggerBase {
   }
 
   @Override
+  @SuppressWarnings({"unchecked"})
   protected void setState(Map<String, Object> state) {
     this.lastAboveEventMap.clear();
     this.lastBelowEventMap.clear();
@@ -248,6 +249,7 @@ public class IndexSizeTrigger extends TriggerBase {
   }
 
   @Override
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void run() {
     synchronized(this) {
       if (isClosed) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/MetricTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/MetricTrigger.java
index 9058a9a..573ac77 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/MetricTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/MetricTrigger.java
@@ -91,6 +91,7 @@ public class MetricTrigger extends TriggerBase {
   @Override
   protected void setState(Map<String, Object> state) {
     lastNodeEvent.clear();
+    @SuppressWarnings({"unchecked"})
     Map<String, Long> nodeTimes = (Map<String, Long>) state.get("lastNodeEvent");
     if (nodeTimes != null) {
       lastNodeEvent.putAll(nodeTimes);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java
index 6ae77bb..42188e4 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java
@@ -153,10 +153,12 @@ public class NodeAddedTrigger extends TriggerBase {
   protected void setState(Map<String, Object> state) {
     this.lastLiveNodes.clear();
     this.nodeNameVsTimeAdded.clear();
+    @SuppressWarnings({"unchecked"})
     Collection<String> lastLiveNodes = (Collection<String>)state.get("lastLiveNodes");
     if (lastLiveNodes != null) {
       this.lastLiveNodes.addAll(lastLiveNodes);
     }
+    @SuppressWarnings({"unchecked"})
     Map<String,Long> nodeNameVsTimeAdded = (Map<String,Long>)state.get("nodeNameVsTimeAdded");
     if (nodeNameVsTimeAdded != null) {
       this.nodeNameVsTimeAdded.putAll(nodeNameVsTimeAdded);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java
index 0a7a267..b1c5818 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java
@@ -143,10 +143,12 @@ public class NodeLostTrigger extends TriggerBase {
   protected void setState(Map<String, Object> state) {
     this.lastLiveNodes.clear();
     this.nodeNameVsTimeRemoved.clear();
+    @SuppressWarnings({"unchecked"})
     Collection<String> lastLiveNodes = (Collection<String>)state.get("lastLiveNodes");
     if (lastLiveNodes != null) {
       this.lastLiveNodes.addAll(lastLiveNodes);
     }
+    @SuppressWarnings({"unchecked"})
     Map<String,Long> nodeNameVsTimeRemoved = (Map<String,Long>)state.get("nodeNameVsTimeRemoved");
     if (nodeNameVsTimeRemoved != null) {
       this.nodeNameVsTimeRemoved.putAll(nodeNameVsTimeRemoved);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
index d84bff4..e080eec 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/ScheduledTriggers.java
@@ -320,6 +320,7 @@ public class ScheduledTriggers implements Closeable {
 
               ActionContext actionContext = new ActionContext(cloudManager, newTrigger, new HashMap<>());
               for (TriggerAction action : actions) {
+                @SuppressWarnings({"unchecked"})
                 List<String> beforeActions = (List<String>) actionContext.getProperties().computeIfAbsent(TriggerEventProcessorStage.BEFORE_ACTION.toString(), k -> new ArrayList<String>());
                 beforeActions.add(action.getName());
                 triggerListeners1.fireListeners(event.getSource(), event, TriggerEventProcessorStage.BEFORE_ACTION, action.getName(), actionContext);
@@ -329,6 +330,7 @@ public class ScheduledTriggers implements Closeable {
                   triggerListeners1.fireListeners(event.getSource(), event, TriggerEventProcessorStage.FAILED, action.getName(), actionContext, e, null);
                   throw new TriggerActionException(event.getSource(), action.getName(), "Error processing action for trigger event: " + event, e);
                 }
+                @SuppressWarnings({"unchecked"})
                 List<String> afterActions = (List<String>) actionContext.getProperties().computeIfAbsent(TriggerEventProcessorStage.AFTER_ACTION.toString(), k -> new ArrayList<String>());
                 afterActions.add(action.getName());
                 triggerListeners1.fireListeners(event.getSource(), event, TriggerEventProcessorStage.AFTER_ACTION, action.getName(), actionContext);
@@ -428,6 +430,7 @@ public class ScheduledTriggers implements Closeable {
               String path = parentPath + '/' + child;
               VersionedData data = stateManager.getData(path, null);
               if (data != null) {
+                @SuppressWarnings({"rawtypes"})
                 Map map = (Map) Utils.fromJSON(data.getData());
                 String requestid = (String) map.get("requestid");
                 try {
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java
index 9539e29..efd5b24 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SearchRateTrigger.java
@@ -280,18 +280,22 @@ public class SearchRateTrigger extends TriggerBase {
     lastNodeEvent.clear();
     lastShardEvent.clear();
     lastReplicaEvent.clear();
+    @SuppressWarnings({"unchecked"})
     Map<String, Long> collTimes = (Map<String, Long>)state.get("lastCollectionEvent");
     if (collTimes != null) {
       lastCollectionEvent.putAll(collTimes);
     }
+    @SuppressWarnings({"unchecked"})
     Map<String, Long> nodeTimes = (Map<String, Long>)state.get("lastNodeEvent");
     if (nodeTimes != null) {
       lastNodeEvent.putAll(nodeTimes);
     }
+    @SuppressWarnings({"unchecked"})
     Map<String, Long> shardTimes = (Map<String, Long>)state.get("lastShardEvent");
     if (shardTimes != null) {
       lastShardEvent.putAll(shardTimes);
     }
+    @SuppressWarnings({"unchecked"})
     Map<String, Long> replicaTimes = (Map<String, Long>)state.get("lastReplicaEvent");
     if (replicaTimes != null) {
       lastReplicaEvent.putAll(replicaTimes);
@@ -651,6 +655,7 @@ public class SearchRateTrigger extends TriggerBase {
   /**
    * This method implements a primitive form of proportional controller with a limiter.
    */
+  @SuppressWarnings({"unchecked", "rawtypes"})
   private void addReplicaHints(String collection, String shard, double r, int replicationFactor, List<Pair<String, String>> hints) {
     int numReplicas = (int)Math.round((r - aboveRate) / (double) replicationFactor);
     // in one event add at least 1 replica
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java
index 09b0865..b841478 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/SystemLogListener.java
@@ -80,6 +80,7 @@ public class SystemLogListener extends TriggerListenerBase {
   }
 
   @Override
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName, ActionContext context,
                Throwable error, String message) throws Exception {
     try {
@@ -153,6 +154,7 @@ public class SystemLogListener extends TriggerListenerBase {
     });
   }
 
+  @SuppressWarnings({"rawtypes"})
   private void addOperations(SolrInputDocument doc, List<SolrRequest> operations) {
     if (operations == null || operations.isEmpty()) {
       return;
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java
index 535fd00..d045f6a 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java
@@ -109,6 +109,7 @@ public abstract class TriggerBase implements AutoScaling.Trigger {
     }
     this.enabled = Boolean.parseBoolean(String.valueOf(this.properties.getOrDefault("enabled", "true")));
     this.waitForSecond = ((Number) this.properties.getOrDefault("waitFor", -1L)).intValue();
+    @SuppressWarnings({"unchecked"})
     List<Map<String, Object>> o = (List<Map<String, Object>>) properties.get("actions");
     if (o != null && !o.isEmpty()) {
       actions = new ArrayList<>(3);
@@ -243,6 +244,7 @@ public abstract class TriggerBase implements AutoScaling.Trigger {
    * @see #getState
    * @lucene.internal
    */
+  @SuppressWarnings({"unchecked"})
   public Map<String,Object> deepCopyState() {
     return Utils.getDeepCopy(getState(), 10, false, true);
   }
@@ -273,6 +275,7 @@ public abstract class TriggerBase implements AutoScaling.Trigger {
   }
 
   @Override
+  @SuppressWarnings({"unchecked"})
   public void restoreState() {
     byte[] data = null;
     String path = ZkStateReader.SOLR_AUTOSCALING_TRIGGER_STATE_PATH + "/" + getName();
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEvent.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEvent.java
index c61556c..91482e5 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEvent.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEvent.java
@@ -59,6 +59,7 @@ public class TriggerEvent implements MapWriter {
       addHint(hint, hintValue);
     }
 
+    @SuppressWarnings({"unchecked"})
     public void addHint(Suggester.Hint hint, Object value) {
       hint.validator.accept(value);
       if (hint.multiValued) {
@@ -85,6 +86,7 @@ public class TriggerEvent implements MapWriter {
       ew.put("hints", hints);
     }
 
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public static Op fromMap(Map<String, Object> map) {
       if (!map.containsKey("action")) {
         return null;
@@ -281,6 +283,7 @@ public class TriggerEvent implements MapWriter {
     return Utils.toJSONString(this);
   }
 
+  @SuppressWarnings({"unchecked"})
   public static TriggerEvent fromMap(Map<String, Object> map) {
     String id = (String)map.get("id");
     String source = (String)map.get("source");
@@ -294,6 +297,7 @@ public class TriggerEvent implements MapWriter {
     return res;
   }
 
+  @SuppressWarnings({"unchecked"})
   public static void fixOps(String type, Map<String, Object> properties) {
     List<Object> ops = (List<Object>)properties.get(type);
     if (ops != null && !ops.isEmpty()) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java
index 9f2da7a..ec41495 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java
@@ -72,6 +72,7 @@ public class TriggerEventQueue {
           continue;
         }
         try {
+          @SuppressWarnings({"unchecked"})
           Map<String, Object> map = (Map<String, Object>) Utils.fromJSON(data);
           return fromMap(map);
         } catch (Exception e) {
@@ -98,6 +99,7 @@ public class TriggerEventQueue {
           continue;
         }
         try {
+          @SuppressWarnings({"unchecked"})
           Map<String, Object> map = (Map<String, Object>) Utils.fromJSON(data);
           return fromMap(map);
         } catch (Exception e) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerUtils.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerUtils.java
index 71a1ce4..cecd933 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerUtils.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerUtils.java
@@ -61,6 +61,7 @@ public class TriggerUtils {
     }
   }
 
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public static void checkProperty(Map<String, Object> properties, Map<String, String> results, String name, boolean required, Class... acceptClasses) {
     Object value = properties.get(name);
     if (value == null) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
index aa2d7d0..25624f4 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
@@ -216,6 +216,7 @@ public class SimCloudManager implements SolrCloudManager {
 
     solrClient = new MockSearchableSolrClient() {
       @Override
+      @SuppressWarnings({"rawtypes"})
       public NamedList<Object> request(SolrRequest request, String collection) throws SolrServerException, IOException {
         if (collection != null) {
           if (request instanceof AbstractUpdateRequest) {
@@ -625,6 +626,7 @@ public class SimCloudManager implements SolrCloudManager {
    * @return future to obtain results
    * @see #getBackgroundTaskFailureCount
    */
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public <T> Future<T> submit(Callable<T> callable) {
     return simCloudManagerPool.submit(new LoggingCallable(backgroundTaskFailureCounter, callable));
   }
@@ -714,6 +716,7 @@ public class SimCloudManager implements SolrCloudManager {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public SolrResponse request(SolrRequest req) throws IOException {
     try {
       // NOTE: we're doing 2 odd things here:
@@ -742,6 +745,8 @@ public class SimCloudManager implements SolrCloudManager {
    * @param req autoscaling request
    * @return results
    */
+
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public SolrResponse simHandleSolrRequest(SolrRequest req) throws IOException, InterruptedException {
     // pay the penalty for remote request, at least 5 ms
     timeSource.sleep(5);
@@ -867,6 +872,7 @@ public class SimCloudManager implements SolrCloudManager {
       if (log.isTraceEnabled()) {
         log.trace("Invoking Collection Action :{} with params {}", action.toLower(), params.toQueryString());
       }
+      @SuppressWarnings({"rawtypes"})
       NamedList results = new NamedList();
       rsp.setResponse(results);
       incrementCount(action.name());
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
index b76f9b5..7e5343d 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
@@ -171,6 +171,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       this.zkVersion = zkVersion;
     }
 
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public DocCollection getColl() throws InterruptedException, IOException {
       DocCollection dc = coll;
       if (dc != null) {
@@ -321,6 +322,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * Initialize from an existing cluster state
    * @param initialState initial cluster state
    */
+  @SuppressWarnings({"unchecked"})
   public void simSetClusterState(ClusterState initialState) throws Exception {
     lock.lockInterruptibly();
     try {
@@ -412,6 +414,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
   }
 
   private ReplicaInfo getReplicaInfo(Replica r) {
+    @SuppressWarnings({"unchecked"})
     final List<ReplicaInfo> list = nodeReplicaMap.computeIfAbsent
       (r.getNodeName(), Utils.NEW_SYNCHRONIZED_ARRAYLIST_FUN);
     synchronized (list) {
@@ -428,6 +431,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * Add a new node to the cluster.
    * @param nodeId unique node id
    */
+  @SuppressWarnings({"unchecked"})
   public void simAddNode(String nodeId) throws Exception {
     ensureNotClosed();
     if (liveNodes.contains(nodeId)) {
@@ -525,6 +529,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
 
   // this method needs to be called under a lock
   private void setReplicaStates(String nodeId, Replica.State state, Set<String> changedCollections) {
+    @SuppressWarnings({"unchecked"})
     List<ReplicaInfo> replicas = nodeReplicaMap.computeIfAbsent(nodeId, Utils.NEW_SYNCHRONIZED_ARRAYLIST_FUN);
     synchronized (replicas) {
       replicas.forEach(r -> {
@@ -590,6 +595,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * @param message replica details
    * @param results result of the operation
    */
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void simAddReplica(ZkNodeProps message, NamedList results) throws Exception {
     if (message.getStr(CommonAdminParams.ASYNC) != null) {
       results.add(CoreAdminParams.REQUESTID, message.getStr(CommonAdminParams.ASYNC));
@@ -647,6 +653,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * @param replicaInfo replica info
    * @param runLeaderElection if true then run a leader election after adding the replica.
    */
+  @SuppressWarnings({"unchecked"})
   public void simAddReplica(String nodeId, ReplicaInfo replicaInfo, boolean runLeaderElection) throws Exception {
     ensureNotClosed();
     lock.lockInterruptibly();
@@ -745,6 +752,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     
     lock.lockInterruptibly();
     try {
+      @SuppressWarnings({"unchecked"})
       final List<ReplicaInfo> replicas = nodeReplicaMap.computeIfAbsent
         (nodeId, Utils.NEW_SYNCHRONIZED_ARRAYLIST_FUN);
       synchronized (replicas) {
@@ -983,6 +991,8 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * @param props collection details
    * @param results results of the operation.
    */
+
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void simCreateCollection(ZkNodeProps props, NamedList results) throws Exception {
     ensureNotClosed();
     if (props.getStr(CommonAdminParams.ASYNC) != null) {
@@ -1160,6 +1170,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * @param async async id
    * @param results results of the operation
    */
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void simDeleteCollection(String collection, String async, NamedList results) throws Exception {
     ensureNotClosed();
     if (async != null) {
@@ -1254,6 +1265,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * @param message operation details
    * @param results operation results.
    */
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void simMoveReplica(ZkNodeProps message, NamedList results) throws Exception {
     ensureNotClosed();
     if (message.getStr(CommonAdminParams.ASYNC) != null) {
@@ -1320,6 +1332,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * @param message operation details
    * @param results operation results
    */
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void simCreateShard(ZkNodeProps message, NamedList results) throws Exception {
     ensureNotClosed();
     if (message.getStr(CommonAdminParams.ASYNC) != null) {
@@ -1386,6 +1399,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * @param message operation details
    * @param results operation results.
    */
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void simSplitShard(ZkNodeProps message, NamedList results) throws Exception {
     ensureNotClosed();
     if (message.getStr(CommonAdminParams.ASYNC) != null) {
@@ -1583,6 +1597,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * @param message operation details
    * @param results operation results
    */
+  @SuppressWarnings({"unchecked", "rawtypes"})
   public void simDeleteShard(ZkNodeProps message, NamedList results) throws Exception {
     ensureNotClosed();
     if (message.getStr(CommonAdminParams.ASYNC) != null) {
@@ -1626,6 +1641,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     }
   }
 
+  @SuppressWarnings({"rawtypes"})
   public void createSystemCollection() throws IOException {
     try {
 
@@ -2040,6 +2056,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * Saves cluster properties to clusterprops.json.
    * @return current properties
    */
+  @SuppressWarnings({"unchecked"})
   private synchronized Map<String, Object> saveClusterProperties() throws Exception {
     if (lastSavedProperties != null && lastSavedProperties.equals(clusterProperties)) {
       return lastSavedProperties;
@@ -2276,6 +2293,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     }
   }
 
+  @SuppressWarnings({"unchecked"})
   public void simSetReplicaValues(String node, Map<String, Map<String, List<ReplicaInfo>>> source, boolean overwrite) {
     List<ReplicaInfo> infos = nodeReplicaMap.get(node);
     if (infos == null) {
@@ -2307,6 +2325,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
    * @return copy of the list of replicas on that node, or empty list if none
    */
   public List<ReplicaInfo> simGetReplicaInfos(String node) {
+    @SuppressWarnings({"unchecked"})
     final List<ReplicaInfo> replicas = nodeReplicaMap.computeIfAbsent
       (node, Utils.NEW_SYNCHRONIZED_ARRAYLIST_FUN);
     // make a defensive copy to avoid ConcurrentModificationException
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
index bc845e4..ea9fa55 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
@@ -619,6 +619,7 @@ public class SimDistribStateManager implements DistribStateManager {
   }
 
   @Override
+  @SuppressWarnings({"unchecked"})
   public AutoScalingConfig getAutoScalingConfig(Watcher watcher) throws InterruptedException, IOException {
     Map<String, Object> map = new HashMap<>();
     int version = 0;
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java
index 318f63e..fb17881 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java
@@ -184,6 +184,7 @@ public class SimDistributedQueueFactory implements DistributedQueueFactory {
     }
 
     @Override
+    @SuppressWarnings({"unchecked", "rawtypes"})
     public void offer(byte[] data) throws Exception {
       Timer.Context time = stats.time(dir + "_offer");
       updateLock.lockInterruptibly();
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
index 2a8103c..17b6d28 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
@@ -152,6 +152,7 @@ public class SimNodeStateProvider implements NodeStateProvider {
    * @param key property name
    * @param value property value.
    */
+  @SuppressWarnings({"unchecked"})
   public void simAddNodeValue(String node, String key, Object value) throws InterruptedException {
     lock.lockInterruptibly();
     try {
@@ -341,7 +342,9 @@ public class SimNodeStateProvider implements NodeStateProvider {
     Map<String, Map<String, List<ReplicaInfo>>> res = new HashMap<>();
     // TODO: probably needs special treatment for "metrics:solr.core..." tags
     for (ReplicaInfo r : replicas) {
+      @SuppressWarnings({"unchecked"})
       Map<String, List<ReplicaInfo>> perCollection = res.computeIfAbsent(r.getCollection(), Utils.NEW_HASHMAP_FUN);
+      @SuppressWarnings({"unchecked"})
       List<ReplicaInfo> perShard = perCollection.computeIfAbsent(r.getShard(), Utils.NEW_ARRAYLIST_FUN);
       // XXX filter out some properties?
       perShard.add(r);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java
index a42ebc1..6adb812 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimScenario.java
@@ -130,6 +130,7 @@ public class SimScenario implements AutoCloseable {
      * {@link #execute(SimScenario)}.
      * @param scenario current scenario
      */
+    @SuppressWarnings({"unchecked"})
     public void prepareCurrentParams(SimScenario scenario) {
       Properties props = new Properties();
       scenario.context.forEach((k, v) -> {
@@ -416,6 +417,7 @@ public class SimScenario implements AutoCloseable {
    */
   public static class LoadAutoscaling extends SimOp {
     @Override
+    @SuppressWarnings({"unchecked"})
     public void execute(SimScenario scenario) throws Exception {
       Map<String, Object> map;
       boolean addDefaults = Boolean.parseBoolean(params.get("withDefaultTriggers", "true"));
@@ -540,9 +542,11 @@ public class SimScenario implements AutoCloseable {
   public static class ApplySuggestions extends SimOp {
     @Override
     public void execute(SimScenario scenario) throws Exception {
+      @SuppressWarnings({"unchecked"})
       List<Suggester.SuggestionInfo> suggestions = (List<Suggester.SuggestionInfo>) scenario.context.getOrDefault(SUGGESTIONS_CTX_PROP, Collections.emptyList());
       int unresolvedCount = 0;
       for (Suggester.SuggestionInfo suggestion : suggestions) {
+        @SuppressWarnings({"rawtypes"})
         SolrRequest operation = suggestion.getOperation();
         if (operation == null) {
           unresolvedCount++;
@@ -596,6 +600,7 @@ public class SimScenario implements AutoCloseable {
         req.setContentWriter(new RequestWriter.StringPayloadContentWriter(streamBody, "application/json"));
       }
       SolrResponse rsp = scenario.cluster.request(req);
+      @SuppressWarnings({"unchecked"})
       List<SolrResponse> responses = (List<SolrResponse>) scenario.context.computeIfAbsent(RESPONSES_CTX_PROP, Utils.NEW_ARRAYLIST_FUN);
       responses.add(rsp);
     }
@@ -705,6 +710,7 @@ public class SimScenario implements AutoCloseable {
   /**
    * Set a temporary listener to wait for a specific trigger event processing.
    */
+  @SuppressWarnings({"unchecked"})
   public static class SetEventListener extends SimOp {
     @Override
     public void execute(SimScenario scenario) throws Exception {
@@ -764,6 +770,7 @@ public class SimScenario implements AutoCloseable {
         listener.wait(waitSec);
         scenario.context.remove(TRIGGER_EVENT_PREFIX + trigger);
         if (listener.getEvent() != null) {
+          @SuppressWarnings({"unchecked"})
           Map<String, Object> ev = listener.getEvent().toMap(new LinkedHashMap<>());
           scenario.context.put(TRIGGER_EVENT_PREFIX + trigger, ev);
         }
@@ -941,6 +948,7 @@ public class SimScenario implements AutoCloseable {
    */
   public static class Dump extends SimOp {
     @Override
+    @SuppressWarnings({"unchecked"})
     public void execute(SimScenario scenario) throws Exception {
       boolean redact = Boolean.parseBoolean(params.get("redact", "false"));
       boolean withData = Boolean.parseBoolean(params.get("withData", "false"));
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java
index 6e04200..03c1f5b 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SimUtils.java
@@ -349,6 +349,7 @@ public class SimUtils {
    * @param req request
    * @return request payload and parameters converted to V1 params
    */
+  @SuppressWarnings({"unchecked"})
   public static ModifiableSolrParams v2AdminRequestToV1Params(V2Request req) {
     Map<String, Object> reqMap = new HashMap<>();
     req.toMap(reqMap);
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java
index 8ea286f..9dc0b4a 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotCloudManager.java
@@ -90,6 +90,7 @@ public class SnapshotCloudManager implements SolrCloudManager {
     SimUtils.checkConsistency(this, config);
   }
 
+  @SuppressWarnings({"unchecked"})
   public SnapshotCloudManager(Map<String, Object> snapshot) throws Exception {
     Objects.requireNonNull(snapshot);
     init(
@@ -120,6 +121,7 @@ public class SnapshotCloudManager implements SolrCloudManager {
     }
   }
 
+  @SuppressWarnings({"unchecked"})
   public static SnapshotCloudManager readSnapshot(File sourceDir) throws Exception {
     if (!sourceDir.exists()) {
       throw new Exception("Source path doesn't exist: " + sourceDir);
@@ -241,6 +243,7 @@ public class SnapshotCloudManager implements SolrCloudManager {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public SolrResponse request(SolrRequest req) throws IOException {
     throw new UnsupportedOperationException("request");
   }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java
index 351265d..e011b4c 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotClusterStateProvider.java
@@ -51,6 +51,7 @@ public class SnapshotClusterStateProvider implements ClusterStateProvider {
     clusterProperties = new HashMap<>(other.getClusterProperties());
   }
 
+  @SuppressWarnings({"unchecked"})
   public SnapshotClusterStateProvider(Map<String, Object> snapshot) {
     Objects.requireNonNull(snapshot);
     liveNodes = Set.copyOf((Collection<String>)snapshot.getOrDefault("liveNodes", Collections.emptySet()));
@@ -93,6 +94,7 @@ public class SnapshotClusterStateProvider implements ClusterStateProvider {
       coll.write(writer);
       String json = out.toString();
       try {
+        @SuppressWarnings({"unchecked"})
         Map<String, Object> collMap = new LinkedHashMap<>((Map<String, Object>)Utils.fromJSON(json.getBytes("UTF-8")));
         collMap.put("zNodeVersion", coll.getZNodeVersion());
         collMap.put("zNode", coll.getZNode());
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java
index fc6bd2d..eb3a29f 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotDistribStateManager.java
@@ -89,6 +89,7 @@ public class SnapshotDistribStateManager implements DistribStateManager {
    */
   public SnapshotDistribStateManager(Map<String, Object> snapshot, AutoScalingConfig config) {
     snapshot.forEach((path, value) -> {
+      @SuppressWarnings({"unchecked"})
       Map<String, Object> map = (Map<String, Object>)value;
       Number version = (Number)map.getOrDefault("version", 0);
       String owner = (String)map.get("owner");
@@ -209,6 +210,7 @@ public class SnapshotDistribStateManager implements DistribStateManager {
   }
 
   @Override
+  @SuppressWarnings({"unchecked"})
   public AutoScalingConfig getAutoScalingConfig(Watcher watcher) throws InterruptedException, IOException {
     VersionedData vd = dataMap.get(ZkStateReader.SOLR_AUTOSCALING_CONF_PATH);
     Map<String, Object> map = new HashMap<>();
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java
index 8ccf849..e8b7828 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/sim/SnapshotNodeStateProvider.java
@@ -48,6 +48,7 @@ public class SnapshotNodeStateProvider implements NodeStateProvider {
    * @param config optional {@link AutoScalingConfig}, which will be used to determine what node and
    *               replica tags to retrieve. If this is null then the other instance's config will be used.
    */
+  @SuppressWarnings({"unchecked"})
   public SnapshotNodeStateProvider(SolrCloudManager other, AutoScalingConfig config) throws Exception {
     if (config == null) {
       config = other.getDistribStateManager().getAutoScalingConfig();
@@ -96,6 +97,7 @@ public class SnapshotNodeStateProvider implements NodeStateProvider {
    * Populate this instance from a previously generated snapshot.
    * @param snapshot previous snapshot created using this class.
    */
+  @SuppressWarnings({"unchecked"})
   public SnapshotNodeStateProvider(Map<String, Object> snapshot) {
     Objects.requireNonNull(snapshot);
     nodeValues = (Map<String, Map<String, Object>>)snapshot.getOrDefault("nodeValues", Collections.emptyMap());
@@ -130,6 +132,7 @@ public class SnapshotNodeStateProvider implements NodeStateProvider {
    * Create a snapshot of all node and replica tag values available from the original source, per the original
    * autoscaling configuration. Note:
    */
+  @SuppressWarnings({"unchecked"})
   public Map<String, Object> getSnapshot() {
     Map<String, Object> snapshot = new LinkedHashMap<>();
     snapshot.put("nodeValues", nodeValues);

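A side note on the suppressions added in these Snapshot* constructors: the warnings come from unchecked casts, because the snapshot is deserialized as a generic Map<String, Object> and its nested values have to be narrowed back to typed maps. A minimal standalone sketch of that cast (SnapshotSketch is a hypothetical class, not part of the patch):

    import java.util.Collections;
    import java.util.Map;

    class SnapshotSketch {
      // The value under "nodeValues" is known, by construction of the snapshot,
      // to be a map of maps, but the compiler cannot verify the narrowing cast,
      // hence the locally scoped @SuppressWarnings.
      @SuppressWarnings({"unchecked"})
      static Map<String, Map<String, Object>> nodeValues(Map<String, Object> snapshot) {
        return (Map<String, Map<String, Object>>)
            snapshot.getOrDefault("nodeValues", Collections.emptyMap());
      }
    }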

[lucene-solr] 21/47: SOLR-14495: Fix or suppress warnings in solr/search/function

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit cf98a612649212d5963950b3651c16698578bbdf
Author: Erick Erickson <Er...@gmail.com>
AuthorDate: Fri May 22 13:40:20 2020 -0400

    SOLR-14495: Fix or suppress warnings in solr/search/function
---
 solr/CHANGES.txt                                              |  6 ++++++
 .../src/java/org/apache/solr/search/FunctionRangeQuery.java   |  3 +++
 .../apache/solr/search/function/CollapseScoreFunction.java    |  2 ++
 .../org/apache/solr/search/function/FieldNameValueSource.java |  1 +
 .../java/org/apache/solr/search/function/FileFloatSource.java | 11 +++++++++--
 .../org/apache/solr/search/function/MultiStringFunction.java  |  1 +
 .../java/org/apache/solr/search/function/OrdFieldSource.java  |  1 +
 .../apache/solr/search/function/ReverseOrdFieldSource.java    |  1 +
 .../apache/solr/search/function/ValueSourceRangeFilter.java   |  2 ++
 .../search/function/distance/GeoDistValueSourceParser.java    |  3 ++-
 .../apache/solr/search/function/distance/GeohashFunction.java |  1 +
 .../search/function/distance/GeohashHaversineFunction.java    |  2 ++
 .../solr/search/function/distance/HaversineConstFunction.java |  2 ++
 .../solr/search/function/distance/HaversineFunction.java      |  2 ++
 .../solr/search/function/distance/StringDistanceFunction.java |  1 +
 .../solr/search/function/distance/VectorDistanceFunction.java |  2 ++
 16 files changed, 38 insertions(+), 3 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 51c0fec..62f3a9c 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -253,6 +253,12 @@ Other Changes
 
 * SOLR-14482: Fix or suppress warnings in solr/search/facet (Erick Erickson)
 
+
+* SOLR-14485: Fix or suppress 11 resource leak warnings in apache/solr/cloud (Andras Salaman via
+  Erick Erickson)
+
+* SOLR-14495: Fix or suppress warnings in solr/search/function (Erick Erickson)
+
 ==================  8.5.1 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/core/src/java/org/apache/solr/search/FunctionRangeQuery.java b/solr/core/src/java/org/apache/solr/search/FunctionRangeQuery.java
index fdcdfc3..489d86f 100644
--- a/solr/core/src/java/org/apache/solr/search/FunctionRangeQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/FunctionRangeQuery.java
@@ -41,17 +41,20 @@ public class FunctionRangeQuery extends SolrConstantScoreQuery implements PostFi
 
   @Override
   public DelegatingCollector getFilterCollector(IndexSearcher searcher) {
+    @SuppressWarnings({"rawtypes"})
     Map fcontext = ValueSource.newContext(searcher);
     Weight weight = rangeFilt.createWeight(searcher, ScoreMode.COMPLETE, 1);
     return new FunctionRangeCollector(fcontext, weight);
   }
 
   class FunctionRangeCollector extends DelegatingCollector {
+    @SuppressWarnings({"rawtypes"})
     final Map fcontext;
     final Weight weight;
     ValueSourceScorer scorer;
     int maxdoc;
 
+    @SuppressWarnings({"rawtypes"})
     public FunctionRangeCollector(Map fcontext, Weight weight) {
       this.fcontext = fcontext;
       this.weight = weight;
diff --git a/solr/core/src/java/org/apache/solr/search/function/CollapseScoreFunction.java b/solr/core/src/java/org/apache/solr/search/function/CollapseScoreFunction.java
index 3932f56..69a3d59 100644
--- a/solr/core/src/java/org/apache/solr/search/function/CollapseScoreFunction.java
+++ b/solr/core/src/java/org/apache/solr/search/function/CollapseScoreFunction.java
@@ -41,6 +41,7 @@ public class CollapseScoreFunction extends ValueSource {
     return 1213241257;
   }
 
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     return new CollapseScoreFunctionValues(context);
   }
@@ -49,6 +50,7 @@ public class CollapseScoreFunction extends ValueSource {
 
     private CollapseScore cscore;
 
+    @SuppressWarnings({"rawtypes"})
     public CollapseScoreFunctionValues(Map context) {
       this.cscore = (CollapseScore) context.get("CSCORE");
       assert null != this.cscore;
diff --git a/solr/core/src/java/org/apache/solr/search/function/FieldNameValueSource.java b/solr/core/src/java/org/apache/solr/search/function/FieldNameValueSource.java
index c122dbb..7b5714a 100644
--- a/solr/core/src/java/org/apache/solr/search/function/FieldNameValueSource.java
+++ b/solr/core/src/java/org/apache/solr/search/function/FieldNameValueSource.java
@@ -39,6 +39,7 @@ public class FieldNameValueSource extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     throw new UnsupportedOperationException("FieldNameValueSource should not be directly used: " + this);
   }
diff --git a/solr/core/src/java/org/apache/solr/search/function/FileFloatSource.java b/solr/core/src/java/org/apache/solr/search/function/FileFloatSource.java
index 3b2eb23..183cf2d 100644
--- a/solr/core/src/java/org/apache/solr/search/function/FileFloatSource.java
+++ b/solr/core/src/java/org/apache/solr/search/function/FileFloatSource.java
@@ -88,6 +88,8 @@ public class FileFloatSource extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
+
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     final int off = readerContext.docBase;
     IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(readerContext);
@@ -165,29 +167,34 @@ public class FileFloatSource extends ValueSource {
 
   /** Internal cache. (from lucene FieldCache) */
   abstract static class Cache {
+    @SuppressWarnings({"rawtypes"})
     private final Map readerCache = new WeakHashMap();
 
     protected abstract Object createValue(IndexReader reader, Object key);
 
+    @SuppressWarnings({"unchecked"})
     public void refresh(IndexReader reader, Object key) {
       Object refreshedValues = createValue(reader, key);
       synchronized (readerCache) {
+        @SuppressWarnings({"rawtypes"})
         Map innerCache = (Map) readerCache.get(reader);
         if (innerCache == null) {
-          innerCache = new HashMap();
+          innerCache = new HashMap<>();
           readerCache.put(reader, innerCache);
         }
         innerCache.put(key, refreshedValues);
       }
     }
 
+    @SuppressWarnings({"unchecked"})
     public Object get(IndexReader reader, Object key) {
+      @SuppressWarnings({"rawtypes"})
       Map innerCache;
       Object value;
       synchronized (readerCache) {
         innerCache = (Map) readerCache.get(reader);
         if (innerCache == null) {
-          innerCache = new HashMap();
+          innerCache = new HashMap<>();
           readerCache.put(reader, innerCache);
           value = null;
         } else {
diff --git a/solr/core/src/java/org/apache/solr/search/function/MultiStringFunction.java b/solr/core/src/java/org/apache/solr/search/function/MultiStringFunction.java
index c4aef6c..eca6f3c 100644
--- a/solr/core/src/java/org/apache/solr/search/function/MultiStringFunction.java
+++ b/solr/core/src/java/org/apache/solr/search/function/MultiStringFunction.java
@@ -60,6 +60,7 @@ public abstract class MultiStringFunction extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues[] valsArr = new FunctionValues[sources.length];
     for (int i=0; i<sources.length; i++) {
diff --git a/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java b/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java
index 9681995..f4053c6 100644
--- a/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java
+++ b/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java
@@ -71,6 +71,7 @@ public class OrdFieldSource extends ValueSource {
 
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     final int off = readerContext.docBase;
     final LeafReader r;
diff --git a/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java b/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java
index d75dca3..a505709 100644
--- a/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java
+++ b/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java
@@ -71,6 +71,7 @@ public class ReverseOrdFieldSource extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     final int off = readerContext.docBase;
     final LeafReader r;
diff --git a/solr/core/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java b/solr/core/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java
index 9f919ed..c0c0a52 100644
--- a/solr/core/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java
+++ b/solr/core/src/java/org/apache/solr/search/function/ValueSourceRangeFilter.java
@@ -78,6 +78,7 @@ public class ValueSourceRangeFilter extends SolrFilter {
 
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public DocIdSet getDocIdSet(final Map context, final LeafReaderContext readerContext, Bits acceptDocs) throws IOException {
     // NB the IndexSearcher parameter here can be null because Filter Weights don't
     // actually use it.
@@ -101,6 +102,7 @@ public class ValueSourceRangeFilter extends SolrFilter {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public void createWeight(Map context, IndexSearcher searcher) throws IOException {
     valueSource.createWeight(context, searcher);
   }
diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java b/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java
index 4f76b87..c61763c 100644
--- a/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java
+++ b/solr/core/src/java/org/apache/solr/search/function/distance/GeoDistValueSourceParser.java
@@ -193,6 +193,7 @@ public class GeoDistValueSourceParser extends ValueSourceParser {
     SchemaField sf = fp.getReq().getSchema().getField(sfield);
     FieldType type = sf.getType();
     if (type instanceof AbstractSpatialFieldType) {
+      @SuppressWarnings({"rawtypes"})
       AbstractSpatialFieldType asft = (AbstractSpatialFieldType) type;
       return new SpatialStrategyMultiValueSource(asft.getStrategy(sfield), asft.getDistanceUnits());
     }
@@ -209,7 +210,7 @@ public class GeoDistValueSourceParser extends ValueSourceParser {
 
     final SpatialStrategy strategy;
     final DistanceUnits distanceUnits;
-
+    @SuppressWarnings({"unchecked"})
     public SpatialStrategyMultiValueSource(SpatialStrategy strategy, DistanceUnits distanceUnits) {
       super(Collections.EMPTY_LIST);
       this.strategy = strategy;
diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/GeohashFunction.java b/solr/core/src/java/org/apache/solr/search/function/distance/GeohashFunction.java
index 4e38843..a6090c6 100644
--- a/solr/core/src/java/org/apache/solr/search/function/distance/GeohashFunction.java
+++ b/solr/core/src/java/org/apache/solr/search/function/distance/GeohashFunction.java
@@ -45,6 +45,7 @@ public class GeohashFunction extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues latDV = lat.getValues(context, readerContext);
     final FunctionValues lonDV = lon.getValues(context, readerContext);
diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java b/solr/core/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java
index 40c7aa7..0af9367 100644
--- a/solr/core/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java
+++ b/solr/core/src/java/org/apache/solr/search/function/distance/GeohashHaversineFunction.java
@@ -59,6 +59,7 @@ public class GeohashHaversineFunction extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues gh1DV = geoHash1.getValues(context, readerContext);
     final FunctionValues gh2DV = geoHash2.getValues(context, readerContext);
@@ -96,6 +97,7 @@ public class GeohashHaversineFunction extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public void createWeight(Map context, IndexSearcher searcher) throws IOException {
     geoHash1.createWeight(context, searcher);
     geoHash2.createWeight(context, searcher);
diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java b/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java
index e489ff4..2433c8d 100644
--- a/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java
+++ b/solr/core/src/java/org/apache/solr/search/function/distance/HaversineConstFunction.java
@@ -56,6 +56,7 @@ public class HaversineConstFunction extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues latVals = latSource.getValues(context, readerContext);
     final FunctionValues lonVals = lonSource.getValues(context, readerContext);
@@ -84,6 +85,7 @@ public class HaversineConstFunction extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public void createWeight(Map context, IndexSearcher searcher) throws IOException {
     latSource.createWeight(context, searcher);
     lonSource.createWeight(context, searcher);
diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/HaversineFunction.java b/solr/core/src/java/org/apache/solr/search/function/distance/HaversineFunction.java
index 1697f2b..b1d35ae 100644
--- a/solr/core/src/java/org/apache/solr/search/function/distance/HaversineFunction.java
+++ b/solr/core/src/java/org/apache/solr/search/function/distance/HaversineFunction.java
@@ -93,6 +93,7 @@ public class HaversineFunction extends ValueSource {
 
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues vals1 = p1.getValues(context, readerContext);
 
@@ -114,6 +115,7 @@ public class HaversineFunction extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public void createWeight(Map context, IndexSearcher searcher) throws IOException {
     p1.createWeight(context, searcher);
     p2.createWeight(context, searcher);
diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/StringDistanceFunction.java b/solr/core/src/java/org/apache/solr/search/function/distance/StringDistanceFunction.java
index 8bf9f1a..0f1e5e5 100644
--- a/solr/core/src/java/org/apache/solr/search/function/distance/StringDistanceFunction.java
+++ b/solr/core/src/java/org/apache/solr/search/function/distance/StringDistanceFunction.java
@@ -43,6 +43,7 @@ public class StringDistanceFunction extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
     final FunctionValues str1DV = str1.getValues(context, readerContext);
     final FunctionValues str2DV = str2.getValues(context, readerContext);
diff --git a/solr/core/src/java/org/apache/solr/search/function/distance/VectorDistanceFunction.java b/solr/core/src/java/org/apache/solr/search/function/distance/VectorDistanceFunction.java
index 474ece3..164f97f 100644
--- a/solr/core/src/java/org/apache/solr/search/function/distance/VectorDistanceFunction.java
+++ b/solr/core/src/java/org/apache/solr/search/function/distance/VectorDistanceFunction.java
@@ -149,6 +149,7 @@ public class VectorDistanceFunction extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
 
     final FunctionValues vals1 = source1.getValues(context, readerContext);
@@ -177,6 +178,7 @@ public class VectorDistanceFunction extends ValueSource {
   }
 
   @Override
+  @SuppressWarnings({"rawtypes"})
   public void createWeight(Map context, IndexSearcher searcher) throws IOException {
     source1.createWeight(context, searcher);
     source2.createWeight(context, searcher);

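Most of the annotations in this patch exist because the ValueSource API still passes its evaluation context as a raw Map, so every override of getValues or createWeight inherits a rawtypes warning; the suppressions are kept as narrow as possible, on a single method or even a single local declaration. A minimal standalone sketch of the local-variable form (RawTypesSketch and legacyContext are hypothetical stand-ins for an API such as ValueSource.newContext that returns a raw Map):

    import java.util.HashMap;
    import java.util.Map;

    class RawTypesSketch {
      static Object lookup(Object key) {
        // Annotating only this declaration keeps the suppression away from
        // the rest of the method.
        @SuppressWarnings({"rawtypes"})
        Map raw = legacyContext();
        return raw.get(key);
      }

      @SuppressWarnings({"rawtypes"})
      static Map legacyContext() {   // stand-in for a legacy API returning a raw Map
        return new HashMap();
      }
    }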

[lucene-solr] 09/47: LUCENE-9374: Add checkBrokenLinks gradle task (#1522)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit d00f79c7bf92e5478193713448b66a87c2dd348a
Author: Tomoko Uchida <to...@apache.org>
AuthorDate: Wed May 20 23:23:24 2020 +0900

    LUCENE-9374: Add checkBrokenLinks gradle task (#1522)
---
 build.gradle                                |  1 +
 gradle/validation/check-broken-links.gradle | 71 +++++++++++++++++++++++++++++
 2 files changed, 72 insertions(+)

diff --git a/build.gradle b/build.gradle
index 7e93c33..2e80ed8 100644
--- a/build.gradle
+++ b/build.gradle
@@ -115,6 +115,7 @@ apply from: file('gradle/validation/ecj-lint.gradle')
 apply from: file('gradle/validation/gradlew-scripts-tweaked.gradle')
 apply from: file('gradle/validation/missing-docs-check.gradle')
 apply from: file('gradle/validation/validate-log-calls.gradle')
+apply from: file('gradle/validation/check-broken-links.gradle')
 
 // Source or data regeneration tasks
 apply from: file('gradle/generation/jflex.gradle')
diff --git a/gradle/validation/check-broken-links.gradle b/gradle/validation/check-broken-links.gradle
new file mode 100644
index 0000000..bebfe45
--- /dev/null
+++ b/gradle/validation/check-broken-links.gradle
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+configure(rootProject) {
+
+  task checkBrokenLinks {
+    group 'Verification'
+    description 'Check broken links in the entire documentation'
+
+    dependsOn ':lucene:checkBrokenLinks'
+    dependsOn ':solr:checkBrokenLinks'
+  }
+
+}
+configure(subprojects.findAll { it.path in [':lucene', ':solr'] }) {
+
+  task checkBrokenLinks(type: CheckBrokenLinksTask, 'dependsOn': 'documentation')
+
+  // TODO: uncomment this line after fixing all broken links.
+  // (we can't fix the cross-project links until ant build is disabled.)
+  // check.dependsOn checkBrokenLinks
+}
+
+class CheckBrokenLinksTask extends DefaultTask {
+
+  // wraps input directory location in DirectoryProperty so as to lazily evaluate 'docroot' property
+  // (see gradle/documentation/documentation.gradle)
+  @InputDirectory
+  final DirectoryProperty docsDir = project.objects.directoryProperty()
+    .fileProvider(project.providers.provider { project.docroot })
+
+  @InputFile
+  File script = project.rootProject.file("dev-tools/scripts/checkJavadocLinks.py")
+
+  @TaskAction
+  def check() {
+    def outputFile = project.file("${getTemporaryDir()}/check-broken-links-output.txt")
+    def result
+    outputFile.withOutputStream { output ->
+      result = project.exec {
+        executable "python3"
+        ignoreExitValue = true
+        standardOutput = output
+        errorOutput = output
+        args = [
+          "-B",
+          script.absolutePath,
+          docsDir.get().getAsFile()
+        ]
+      }
+    }
+
+    if (result.getExitValue() != 0) {
+      throw new GradleException("Broken links check failed. Command output at: ${outputFile}")
+    }
+  }
+}

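Once this file is in place the task can be run on its own; as configured above it is registered on the root project and on :lucene and :solr, and it is deliberately not yet wired into check (the TODO keeps check.dependsOn commented out until the remaining cross-project links are fixed). Assuming the standard Gradle wrapper at the repository root, an invocation such as ./gradlew checkBrokenLinks (or ./gradlew :lucene:checkBrokenLinks for a single project) should build the documentation and then run dev-tools/scripts/checkJavadocLinks.py over it, so python3 must be available on the PATH.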

[lucene-solr] 35/47: SOLR-14419: adding {param:ref} to Query DSL

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 6e2cdcca792f330a287eba0a6642f43a35032c43
Author: Mikhail Khludnev <mk...@apache.org>
AuthorDate: Sun Apr 26 23:04:00 2020 +0300

    SOLR-14419: adding {param:ref} to Query DSL
---
 solr/CHANGES.txt                                   |  2 +
 .../solr/request/json/JsonQueryConverter.java      | 50 ++++++++++++++--------
 .../apache/solr/search/json/TestJsonRequest.java   | 39 ++++++++++++++++-
 3 files changed, 73 insertions(+), 18 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index aeb7945..28d8552 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -125,6 +125,8 @@ Improvements
   themselves. The collection hints are pushed down to the policy engine so operations for non-matching collections
   are not computed at all. (ab, shalin)
 
+* SOLR-14419: json.queries and other request parameters can be referenced via {"param":"ref"} in the Query DSL (Mikhail Khludnev)
+
 Optimizations
 ---------------------
 * SOLR-8306: Do not collect expand documents when expand.rows=0 (Marshall Sanders, Amelia Henderson)
diff --git a/solr/core/src/java/org/apache/solr/request/json/JsonQueryConverter.java b/solr/core/src/java/org/apache/solr/request/json/JsonQueryConverter.java
index 22e15c7..e736750 100644
--- a/solr/core/src/java/org/apache/solr/request/json/JsonQueryConverter.java
+++ b/solr/core/src/java/org/apache/solr/request/json/JsonQueryConverter.java
@@ -98,6 +98,18 @@ class JsonQueryConverter {
           qtype = map.keySet().iterator().next();
           // FUTURE: might want to recurse here instead to handle nested tags (and add tagName as a parameter?)
         }
+      } else {
+        if (qtype.equals("param")) {
+          boolean toplevel;
+          if (toplevel=(builder.length() == 0)) {
+            builder.append("{!v=");  
+          }
+          builder.append("$").append(map.get("param"));
+          if (toplevel) {
+            builder.append("}");
+          }
+          return;
+        }
       }
 
       StringBuilder subBuilder = useSubBuilder ? new StringBuilder() : builder;
@@ -114,26 +126,30 @@ class JsonQueryConverter {
         builder.append('$').append(putParam(subBuilder.toString(), additionalParams));
       }
     } else {
-      for (Map.Entry<String, Object> entry : map.entrySet()) {
-        String key = entry.getKey();
-        if (entry.getValue() instanceof List) {
-          if (key.equals("query")) {
-            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-                "Error when parsing json query, value of query field should not be a list, found : " + entry.getValue());
-          }
-          List l = (List) entry.getValue();
-          for (Object subVal : l) {
+      if(map.size()==1 && map.keySet().iterator().next().equals("param")) {
+        builder.append("v").append("=$").append(map.get("param")).append(" ");
+      } else {
+        for (Map.Entry<String, Object> entry : map.entrySet()) {
+          String key = entry.getKey();
+          if (entry.getValue() instanceof List) {
+            if (key.equals("query")) {
+              throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+                  "Error when parsing json query, value of query field should not be a list, found : " + entry.getValue());
+            }
+            List l = (List) entry.getValue();
+            for (Object subVal : l) {
+              builder.append(key).append("=");
+              buildLocalParams(builder, subVal, true, additionalParams);
+              builder.append(" ");
+            }
+          } else {
+            if (key.equals("query")) {
+              key = "v";
+            }
             builder.append(key).append("=");
-            buildLocalParams(builder, subVal, true, additionalParams);
+            buildLocalParams(builder, entry.getValue(), true, additionalParams);
             builder.append(" ");
           }
-        } else {
-          if (key.equals("query")) {
-            key = "v";
-          }
-          builder.append(key).append("=");
-          buildLocalParams(builder, entry.getValue(), true, additionalParams);
-          builder.append(" ");
         }
       }
     }
diff --git a/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java b/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java
index 2ab19e9..9451ddf 100644
--- a/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java
+++ b/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java
@@ -24,6 +24,7 @@ import org.apache.solr.JSONTestUtil;
 import org.apache.solr.SolrTestCaseHS;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.CaffeineCache;
 import org.apache.solr.search.DocSet;
@@ -179,7 +180,7 @@ public class TestJsonRequest extends SolrTestCaseHS {
         , "response/docs==[{id:'5', x:5.5},{id:'4', x:5.5}]"
     );
 
-
+    doParamRefDslTest(client);
 
     // test templating before parsing JSON
     client.testJQ( params("json","${OPENBRACE} query:'cat_s:A' ${CLOSEBRACE}", "json","${OPENBRACE} filter:'where_s:NY'${CLOSEBRACE}",  "OPENBRACE","{", "CLOSEBRACE","}")
@@ -407,6 +408,42 @@ public class TestJsonRequest extends SolrTestCaseHS {
 
   }
 
+  private static void doParamRefDslTest(Client client) throws Exception {
+    // referencing in dsl                //nestedqp
+    client.testJQ( params("json","{query: {query:  {param:'ref1'}}}", "ref1","{!field f=cat_s}A")
+        , "response/numFound==2"
+    );   
+    // referencing json string param
+    client.testJQ( params("json", random().nextBoolean()  ? 
+            "{query:{query:{param:'ref1'}}}"  // nestedqp
+           : "{query: {query: {query:{param:'ref1'}}}}",  // nestedqp, v local param  
+          "json",random().nextBoolean() 
+              ? "{params:{ref1:'{!field f=cat_s}A'}}" // string param  
+              : "{queries:{ref1:{field:{f:cat_s,query:A}}}}" ) // qdsl
+        , "response/numFound==2"
+    );
+    {                                                     // shortest top level ref
+      final ModifiableSolrParams params = params("json","{query:{param:'ref1'}}");
+      if (random().nextBoolean()) {
+        params.add("ref1","cat_s:A"); // either to plain string
+      } else {
+        params.add("json","{queries:{ref1:{field:{f:cat_s,query:A}}}}");// or to qdsl
+      }
+      client.testJQ( params, "response/numFound==2");
+    }  // ref in bool must
+    client.testJQ( params("json","{query:{bool: {must:[{param:fq1},{param:fq2}]}}}",
+        "json","{params:{fq1:'cat_s:A', fq2:'where_s:NY'}}", "json.fields", "id")
+        , "response/docs==[{id:'1'}]"
+    );// referencing dsl&strings from filters objs&array
+    client.testJQ( params("json.filter","{param:fq1}","json.filter","{param:fq2}",
+        "json", random().nextBoolean() ?
+             "{queries:{fq1:{lucene:{query:'cat_s:A'}}, fq2:{lucene:{query:'where_s:NY'}}}}" : 
+             "{params:{fq1:'cat_s:A', fq2:'where_s:NY'}}", 
+        "json.fields", "id", "q", "*:*")
+        , "response/docs==[{id:'1'}]"
+    );
+  }
+
   private static void testFilterCachingLocally(Client client) throws Exception {
     if(client.getClientProvider()==null) {
       final SolrQueryRequest request = req();

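The new syntax is easiest to see in the shortest case exercised above: a {param:'name'} object inside the JSON Request API resolves to the value of the request parameter (or json.queries entry) with that name. A minimal sketch reusing the testJQ/params helpers shown in the test, with collection setup omitted:

    // {param:'ref1'} in the query body is only a reference; the separate
    // "ref1" parameter supplies the query string it expands to.
    client.testJQ(params("json", "{query:{param:'ref1'}}",
                         "ref1", "cat_s:A"),
        "response/numFound==2");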

[lucene-solr] 37/47: LUCENE-9359: Always call checkFooter in SegmentInfos#readCommit. (#1483)

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit daf7160f3d371ae4da42f40d1ccac9eff23777e2
Author: Adrien Grand <jp...@gmail.com>
AuthorDate: Fri May 29 14:59:36 2020 +0200

    LUCENE-9359: Always call checkFooter in SegmentInfos#readCommit. (#1483)
---
 lucene/CHANGES.txt                                 |   3 +
 .../java/org/apache/lucene/index/SegmentInfos.java | 231 +++++++++++----------
 .../org/apache/lucene/index/TestSegmentInfos.java  |  61 ++++++
 3 files changed, 182 insertions(+), 113 deletions(-)

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index cd42f6e..50b7f7b 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -195,6 +195,9 @@ Improvements
 * LUCENE-9342: TotalHits' relation will be EQUAL_TO when the number of hits is lower than TopDocsColector's numHits
   (Tomás Fernández Löbbe)
 
+* LUCENE-9359: SegmentInfos#readCommit now always returns a
+  CorruptIndexException if the content of the file is invalid. (Adrien Grand)
+
 Optimizations
 ---------------------
 
diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java b/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
index f9edccd..5475fbd 100644
--- a/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
+++ b/lucene/core/src/java/org/apache/lucene/index/SegmentInfos.java
@@ -304,136 +304,141 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
 
   /** Read the commit from the provided {@link ChecksumIndexInput}. */
   public static final SegmentInfos readCommit(Directory directory, ChecksumIndexInput input, long generation) throws IOException {
+    Throwable priorE = null;
+    try {
+      // NOTE: as long as we want to throw indexformattooold (vs corruptindexexception), we need
+      // to read the magic ourselves.
+      int magic = input.readInt();
+      if (magic != CodecUtil.CODEC_MAGIC) {
+        throw new IndexFormatTooOldException(input, magic, CodecUtil.CODEC_MAGIC, CodecUtil.CODEC_MAGIC);
+      }
+      int format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_70, VERSION_CURRENT);
+      byte id[] = new byte[StringHelper.ID_LENGTH];
+      input.readBytes(id, 0, id.length);
+      CodecUtil.checkIndexHeaderSuffix(input, Long.toString(generation, Character.MAX_RADIX));
+
+      Version luceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
+      int indexCreatedVersion = input.readVInt();
+      if (luceneVersion.major < indexCreatedVersion) {
+        throw new CorruptIndexException("Creation version [" + indexCreatedVersion
+            + ".x] can't be greater than the version that wrote the segment infos: [" + luceneVersion + "]" , input);
+      }
 
-    // NOTE: as long as we want to throw indexformattooold (vs corruptindexexception), we need
-    // to read the magic ourselves.
-    int magic = input.readInt();
-    if (magic != CodecUtil.CODEC_MAGIC) {
-      throw new IndexFormatTooOldException(input, magic, CodecUtil.CODEC_MAGIC, CodecUtil.CODEC_MAGIC);
-    }
-    int format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_70, VERSION_CURRENT);
-    byte id[] = new byte[StringHelper.ID_LENGTH];
-    input.readBytes(id, 0, id.length);
-    CodecUtil.checkIndexHeaderSuffix(input, Long.toString(generation, Character.MAX_RADIX));
-
-    Version luceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
-    int indexCreatedVersion = input.readVInt();
-    if (luceneVersion.major < indexCreatedVersion) {
-      throw new CorruptIndexException("Creation version [" + indexCreatedVersion
-          + ".x] can't be greater than the version that wrote the segment infos: [" + luceneVersion + "]" , input);
-    }
-
-    if (indexCreatedVersion < Version.LATEST.major - 1) {
-      throw new IndexFormatTooOldException(input, "This index was initially created with Lucene "
-          + indexCreatedVersion + ".x while the current version is " + Version.LATEST
-          + " and Lucene only supports reading the current and previous major versions.");
-    }
-
-    SegmentInfos infos = new SegmentInfos(indexCreatedVersion);
-    infos.id = id;
-    infos.generation = generation;
-    infos.lastGeneration = generation;
-    infos.luceneVersion = luceneVersion;
-
-    infos.version = input.readLong();
-    //System.out.println("READ sis version=" + infos.version);
-    if (format > VERSION_70) {
-      infos.counter = input.readVLong();
-    } else {
-      infos.counter = input.readInt();
-    }
-    int numSegments = input.readInt();
-    if (numSegments < 0) {
-      throw new CorruptIndexException("invalid segment count: " + numSegments, input);
-    }
+      if (indexCreatedVersion < Version.LATEST.major - 1) {
+        throw new IndexFormatTooOldException(input, "This index was initially created with Lucene "
+            + indexCreatedVersion + ".x while the current version is " + Version.LATEST
+            + " and Lucene only supports reading the current and previous major versions.");
+      }
 
-    if (numSegments > 0) {
-      infos.minSegmentLuceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
-    } else {
-      // else leave as null: no segments
-    }
+      SegmentInfos infos = new SegmentInfos(indexCreatedVersion);
+      infos.id = id;
+      infos.generation = generation;
+      infos.lastGeneration = generation;
+      infos.luceneVersion = luceneVersion;
 
-    long totalDocs = 0;
-    for (int seg = 0; seg < numSegments; seg++) {
-      String segName = input.readString();
-      byte[] segmentID = new byte[StringHelper.ID_LENGTH];
-      input.readBytes(segmentID, 0, segmentID.length);
-      Codec codec = readCodec(input);
-      SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ);
-      info.setCodec(codec);
-      totalDocs += info.maxDoc();
-      long delGen = input.readLong();
-      int delCount = input.readInt();
-      if (delCount < 0 || delCount > info.maxDoc()) {
-        throw new CorruptIndexException("invalid deletion count: " + delCount + " vs maxDoc=" + info.maxDoc(), input);
-      }
-      long fieldInfosGen = input.readLong();
-      long dvGen = input.readLong();
-      int softDelCount = format > VERSION_72 ? input.readInt() : 0;
-      if (softDelCount < 0 || softDelCount > info.maxDoc()) {
-        throw new CorruptIndexException("invalid deletion count: " + softDelCount + " vs maxDoc=" + info.maxDoc(), input);
+      infos.version = input.readLong();
+      //System.out.println("READ sis version=" + infos.version);
+      if (format > VERSION_70) {
+        infos.counter = input.readVLong();
+      } else {
+        infos.counter = input.readInt();
       }
-      if (softDelCount + delCount > info.maxDoc()) {
-        throw new CorruptIndexException("invalid deletion count: " + softDelCount + delCount + " vs maxDoc=" + info.maxDoc(), input);
+      int numSegments = input.readInt();
+      if (numSegments < 0) {
+        throw new CorruptIndexException("invalid segment count: " + numSegments, input);
       }
-      final byte[] sciId;
-      if (format > VERSION_74) {
-        byte marker = input.readByte();
-        switch (marker) {
-          case 1:
-            sciId = new byte[StringHelper.ID_LENGTH];
-            input.readBytes(sciId, 0, sciId.length);
-            break;
-          case 0:
-            sciId = null;
-            break;
-          default:
-            throw new CorruptIndexException("invalid SegmentCommitInfo ID marker: " + marker, input);
-        }
+
+      if (numSegments > 0) {
+        infos.minSegmentLuceneVersion = Version.fromBits(input.readVInt(), input.readVInt(), input.readVInt());
       } else {
-        sciId = null;
+        // else leave as null: no segments
       }
-      SegmentCommitInfo siPerCommit = new SegmentCommitInfo(info, delCount, softDelCount, delGen, fieldInfosGen, dvGen, sciId);
-      siPerCommit.setFieldInfosFiles(input.readSetOfStrings());
-      final Map<Integer,Set<String>> dvUpdateFiles;
-      final int numDVFields = input.readInt();
-      if (numDVFields == 0) {
-        dvUpdateFiles = Collections.emptyMap();
-      } else {
-        Map<Integer,Set<String>> map = new HashMap<>(numDVFields);
-        for (int i = 0; i < numDVFields; i++) {
-          map.put(input.readInt(), input.readSetOfStrings());
+
+      long totalDocs = 0;
+      for (int seg = 0; seg < numSegments; seg++) {
+        String segName = input.readString();
+        byte[] segmentID = new byte[StringHelper.ID_LENGTH];
+        input.readBytes(segmentID, 0, segmentID.length);
+        Codec codec = readCodec(input);
+        SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ);
+        info.setCodec(codec);
+        totalDocs += info.maxDoc();
+        long delGen = input.readLong();
+        int delCount = input.readInt();
+        if (delCount < 0 || delCount > info.maxDoc()) {
+          throw new CorruptIndexException("invalid deletion count: " + delCount + " vs maxDoc=" + info.maxDoc(), input);
         }
-        dvUpdateFiles = Collections.unmodifiableMap(map);
-      }
-      siPerCommit.setDocValuesUpdatesFiles(dvUpdateFiles);
-      infos.add(siPerCommit);
+        long fieldInfosGen = input.readLong();
+        long dvGen = input.readLong();
+        int softDelCount = format > VERSION_72 ? input.readInt() : 0;
+        if (softDelCount < 0 || softDelCount > info.maxDoc()) {
+          throw new CorruptIndexException("invalid deletion count: " + softDelCount + " vs maxDoc=" + info.maxDoc(), input);
+        }
+        if (softDelCount + delCount > info.maxDoc()) {
+          throw new CorruptIndexException("invalid deletion count: " + softDelCount + delCount + " vs maxDoc=" + info.maxDoc(), input);
+        }
+        final byte[] sciId;
+        if (format > VERSION_74) {
+          byte marker = input.readByte();
+          switch (marker) {
+            case 1:
+              sciId = new byte[StringHelper.ID_LENGTH];
+              input.readBytes(sciId, 0, sciId.length);
+              break;
+            case 0:
+              sciId = null;
+              break;
+            default:
+              throw new CorruptIndexException("invalid SegmentCommitInfo ID marker: " + marker, input);
+          }
+        } else {
+          sciId = null;
+        }
+        SegmentCommitInfo siPerCommit = new SegmentCommitInfo(info, delCount, softDelCount, delGen, fieldInfosGen, dvGen, sciId);
+        siPerCommit.setFieldInfosFiles(input.readSetOfStrings());
+        final Map<Integer,Set<String>> dvUpdateFiles;
+        final int numDVFields = input.readInt();
+        if (numDVFields == 0) {
+          dvUpdateFiles = Collections.emptyMap();
+        } else {
+          Map<Integer,Set<String>> map = new HashMap<>(numDVFields);
+          for (int i = 0; i < numDVFields; i++) {
+            map.put(input.readInt(), input.readSetOfStrings());
+          }
+          dvUpdateFiles = Collections.unmodifiableMap(map);
+        }
+        siPerCommit.setDocValuesUpdatesFiles(dvUpdateFiles);
+        infos.add(siPerCommit);
 
-      Version segmentVersion = info.getVersion();
+        Version segmentVersion = info.getVersion();
 
-      if (segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) {
-        throw new CorruptIndexException("segments file recorded minSegmentLuceneVersion=" + infos.minSegmentLuceneVersion + " but segment=" + info + " has older version=" + segmentVersion, input);
-      }
+        if (segmentVersion.onOrAfter(infos.minSegmentLuceneVersion) == false) {
+          throw new CorruptIndexException("segments file recorded minSegmentLuceneVersion=" + infos.minSegmentLuceneVersion + " but segment=" + info + " has older version=" + segmentVersion, input);
+        }
 
-      if (infos.indexCreatedVersionMajor >= 7 && segmentVersion.major < infos.indexCreatedVersionMajor) {
-        throw new CorruptIndexException("segments file recorded indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor + " but segment=" + info + " has older version=" + segmentVersion, input);
-      }
+        if (infos.indexCreatedVersionMajor >= 7 && segmentVersion.major < infos.indexCreatedVersionMajor) {
+          throw new CorruptIndexException("segments file recorded indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor + " but segment=" + info + " has older version=" + segmentVersion, input);
+        }
 
-      if (infos.indexCreatedVersionMajor >= 7 && info.getMinVersion() == null) {
-        throw new CorruptIndexException("segments infos must record minVersion with indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor, input);
+        if (infos.indexCreatedVersionMajor >= 7 && info.getMinVersion() == null) {
+          throw new CorruptIndexException("segments infos must record minVersion with indexCreatedVersionMajor=" + infos.indexCreatedVersionMajor, input);
+        }
       }
-    }
 
-    infos.userData = input.readMapOfStrings();
+      infos.userData = input.readMapOfStrings();
 
-    CodecUtil.checkFooter(input);
+      // LUCENE-6299: check we are in bounds
+      if (totalDocs > IndexWriter.getActualMaxDocs()) {
+        throw new CorruptIndexException("Too many documents: an index cannot exceed " + IndexWriter.getActualMaxDocs() + " but readers have total maxDoc=" + totalDocs, input);
+      }
 
-    // LUCENE-6299: check we are in bounds
-    if (totalDocs > IndexWriter.getActualMaxDocs()) {
-      throw new CorruptIndexException("Too many documents: an index cannot exceed " + IndexWriter.getActualMaxDocs() + " but readers have total maxDoc=" + totalDocs, input);
+      return infos;
+    } catch (Throwable t) {
+      priorE = t;
+    } finally {
+      CodecUtil.checkFooter(input, priorE);
     }
-
-    return infos;
+    throw new Error("Unreachable code");
   }
 
   private static Codec readCodec(DataInput input) throws IOException {
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
index 19d8214..23c98ad 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
@@ -18,12 +18,16 @@ package org.apache.lucene.index;
 
 
 import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.StringHelper;
+import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;
 
 import java.io.IOException;
@@ -178,5 +182,62 @@ public class TestSegmentInfos extends LuceneTestCase {
       assertEquals("clone changed but shouldn't", StringHelper.idToString(id), StringHelper.idToString(clone.getId()));
     }
   }
+
+  public void testBitFlippedTriggersCorruptIndexException() throws IOException {
+    BaseDirectoryWrapper dir = newDirectory();
+    dir.setCheckIndexOnClose(false);
+    byte id[] = StringHelper.randomId();
+    Codec codec = Codec.getDefault();
+
+    SegmentInfos sis = new SegmentInfos(Version.LATEST.major);
+    SegmentInfo info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_0", 1, false, Codec.getDefault(),
+                                       Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
+    info.setFiles(Collections.<String>emptySet());
+    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
+    SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, 0, -1, -1, -1, StringHelper.randomId());
+    sis.add(commitInfo);
+
+    info = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "_1", 1, false, Codec.getDefault(),
+                           Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap(), null);
+    info.setFiles(Collections.<String>emptySet());
+    codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
+    commitInfo = new SegmentCommitInfo(info, 0, 0,-1, -1, -1, StringHelper.randomId());
+    sis.add(commitInfo);
+
+    sis.commit(dir);
+
+    BaseDirectoryWrapper corruptDir = newDirectory();
+    corruptDir.setCheckIndexOnClose(false);
+    boolean corrupt = false;
+    for (String file : dir.listAll()) {
+      if (file.startsWith(IndexFileNames.SEGMENTS)) {
+        try (IndexInput in = dir.openInput(file, IOContext.DEFAULT);
+            IndexOutput out = corruptDir.createOutput(file, IOContext.DEFAULT)) {
+          final long corruptIndex = TestUtil.nextLong(random(), 0, in.length() - 1);
+          out.copyBytes(in, corruptIndex);
+          final int b = Byte.toUnsignedInt(in.readByte()) + TestUtil.nextInt(random(), 0x01, 0xff);
+          out.writeByte((byte) b);
+          out.copyBytes(in, in.length() - in.getFilePointer());
+        }
+        try (IndexInput in = corruptDir.openInput(file, IOContext.DEFAULT)) {
+          CodecUtil.checksumEntireFile(in);
+          if (VERBOSE) {
+            System.out.println("TEST: Altering the file did not update the checksum, aborting...");
+          }
+          return;
+        } catch (CorruptIndexException e) {
+          // ok
+        }
+        corrupt = true;
+      } else if (slowFileExists(corruptDir, file) == false) { // extraFS
+        corruptDir.copyFrom(dir, file, file, IOContext.DEFAULT);
+      }
+    }
+    assertTrue("No segments file found", corrupt);
+
+    expectThrows(CorruptIndexException.class, () -> SegmentInfos.readLatestCommit(corruptDir));
+    dir.close();
+    corruptDir.close();
+  }
 }
 

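The reshuffling in SegmentInfos#readCommit is large but mostly mechanical: the parsing body is wrapped so that CodecUtil.checkFooter(input, priorE) is reached on every path, which means the file's checksum is verified even when parsing threw first and corruption is surfaced consistently (see the CHANGES entry above). A minimal standalone sketch of the idiom, with the readLong call standing in for the real body parsing:

    import java.io.IOException;
    import org.apache.lucene.codecs.CodecUtil;
    import org.apache.lucene.store.ChecksumIndexInput;

    final class FooterCheckSketch {
      // Remember any failure from the body, then let checkFooter surface it
      // together with the footer/checksum verification.
      static long readWithFooterCheck(ChecksumIndexInput input) throws IOException {
        Throwable priorE = null;
        try {
          return input.readLong();   // stands in for parsing the file body
        } catch (Throwable t) {
          priorE = t;
        } finally {
          CodecUtil.checkFooter(input, priorE);
        }
        throw new AssertionError("unreachable: checkFooter rethrows priorE");
      }
    }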

[lucene-solr] 47/47: Fix case where mergeOnCommit would attempt to delete files twice in the presence of deletions

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

sokolov pushed a commit to branch jira/lucene-8962
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 107fc173f6d2e732a2b0d2af97aa98ef591ffc05
Author: Michael Sokolov <so...@amazon.com>
AuthorDate: Wed Jun 3 15:12:02 2020 -0400

    Fix case where mergeOnCommit would attempt to delete files twice in the presence of deletions
---
 .../java/org/apache/lucene/index/IndexWriter.java  |   3 +-
 .../org/apache/lucene/index/TestIndexWriter.java   | 141 +++++++++++++--------
 2 files changed, 91 insertions(+), 53 deletions(-)

diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
index 88fdb90..13e0443 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
@@ -3169,13 +3169,13 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable,
           // Resolve "live" SegmentInfos segments to their toCommit cloned equivalents, based on segment name.
           Set<String> mergedSegmentNames = new HashSet<>();
           for (SegmentCommitInfo sci : this.segments) {
-            deleter.decRef(sci.files());
             mergedSegmentNames.add(sci.info.name);
           }
           List<SegmentCommitInfo> toCommitMergedAwaySegments = new ArrayList<>();
           for (SegmentCommitInfo sci : toCommit) {
             if (mergedSegmentNames.contains(sci.info.name)) {
               toCommitMergedAwaySegments.add(sci);
+              deleter.decRef(sci.files());
             }
           }
           // Construct a OneMerge that applies to toCommit
@@ -4593,6 +4593,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable,
         // Merge would produce a 0-doc segment, so we do nothing except commit the merge to remove all the 0-doc segments that we "merged":
         assert merge.info.info.maxDoc() == 0;
         commitMerge(merge, mergeState);
+        success = true;
         return 0;
       }
 
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
index 8fb1ce5..7590b1a 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
@@ -344,7 +344,7 @@ public class TestIndexWriter extends LuceneTestCase {
   // Make sure it's OK to change RAM buffer size and
   // maxBufferedDocs in a write session
   public void testChangingRAMBuffer() throws IOException {
-    Directory dir = newDirectory();      
+    Directory dir = newDirectory();
     IndexWriter writer  = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
     writer.getConfig().setMaxBufferedDocs(10);
     writer.getConfig().setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
@@ -607,7 +607,7 @@ public class TestIndexWriter extends LuceneTestCase {
           doc.add(newField("content4", contents, customType));
           type = customType;
         } else
-          type = TextField.TYPE_NOT_STORED; 
+          type = TextField.TYPE_NOT_STORED;
         doc.add(newTextField("content1", contents, Field.Store.NO));
         doc.add(newField("content3", "", customType));
         doc.add(newField("content5", "", type));
@@ -663,13 +663,13 @@ public class TestIndexWriter extends LuceneTestCase {
     writer.close();
     dir.close();
   }
-  
+
   public void testEmptyFieldNameTerms() throws IOException {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
     Document doc = new Document();
     doc.add(newTextField("", "a b c", Field.Store.NO));
-    writer.addDocument(doc);  
+    writer.addDocument(doc);
     writer.close();
     DirectoryReader reader = DirectoryReader.open(dir);
     LeafReader subreader = getOnlyLeafReader(reader);
@@ -681,7 +681,7 @@ public class TestIndexWriter extends LuceneTestCase {
     reader.close();
     dir.close();
   }
-  
+
   public void testEmptyFieldNameWithEmptyTerm() throws IOException {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
@@ -690,7 +690,7 @@ public class TestIndexWriter extends LuceneTestCase {
     doc.add(newStringField("", "a", Field.Store.NO));
     doc.add(newStringField("", "b", Field.Store.NO));
     doc.add(newStringField("", "c", Field.Store.NO));
-    writer.addDocument(doc);  
+    writer.addDocument(doc);
     writer.close();
     DirectoryReader reader = DirectoryReader.open(dir);
     LeafReader subreader = getOnlyLeafReader(reader);
@@ -834,7 +834,7 @@ public class TestIndexWriter extends LuceneTestCase {
     customType.setStoreTermVectors(true);
     customType.setStoreTermVectorPositions(true);
     customType.setStoreTermVectorOffsets(true);
-    
+
     doc.add(newField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType));
     writer.addDocument(doc);
     writer.addDocument(doc);
@@ -922,7 +922,7 @@ public class TestIndexWriter extends LuceneTestCase {
 
       // open/close slowly sometimes
       dir.setUseSlowOpenClosers(true);
-      
+
       // throttle a little
       dir.setThrottling(MockDirectoryWrapper.Throttling.SOMETIMES);
 
@@ -1148,7 +1148,7 @@ public class TestIndexWriter extends LuceneTestCase {
 
     FieldType customType = new FieldType(StoredField.TYPE);
     customType.setTokenized(true);
-    
+
     Field f = new Field("binary", b, 10, 17, customType);
     // TODO: this is evil, changing the type after creating the field:
     customType.setIndexOptions(IndexOptions.DOCS);
@@ -1157,7 +1157,7 @@ public class TestIndexWriter extends LuceneTestCase {
     f.setTokenStream(doc1field1);
 
     FieldType customType2 = new FieldType(TextField.TYPE_STORED);
-    
+
     Field f2 = newField("string", "value", customType2);
     final MockTokenizer doc1field2 = new MockTokenizer(MockTokenizer.WHITESPACE, false);
     doc1field2.setReader(new StringReader("doc1field2"));
@@ -1233,7 +1233,7 @@ public class TestIndexWriter extends LuceneTestCase {
   public void testDeleteUnusedFiles() throws Exception {
     assumeFalse("test relies on exact filenames", Codec.getDefault() instanceof SimpleTextCodec);
     assumeWorkingMMapOnWindows();
-    
+
     for(int iter=0;iter<2;iter++) {
       // relies on windows semantics
       Path path = createTempDir();
@@ -1250,7 +1250,7 @@ public class TestIndexWriter extends LuceneTestCase {
       }
 
       MergePolicy mergePolicy = newLogMergePolicy(true);
-      
+
       // This test expects all of its segments to be in CFS
       mergePolicy.setNoCFSRatio(1.0);
       mergePolicy.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
@@ -1338,7 +1338,7 @@ public class TestIndexWriter extends LuceneTestCase {
     customType.setStoreTermVectors(true);
     customType.setStoreTermVectorPositions(true);
     customType.setStoreTermVectorOffsets(true);
-    
+
     doc.add(newField("c", "val", customType));
     writer.addDocument(doc);
     writer.commit();
@@ -1379,7 +1379,7 @@ public class TestIndexWriter extends LuceneTestCase {
     // indexed, flushed (but not committed) and then IW rolls back, then no
     // files are left in the Directory.
     Directory dir = newDirectory();
-    
+
     String[] origFiles = dir.listAll();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
                                                 .setMaxBufferedDocs(2)
@@ -1409,8 +1409,8 @@ public class TestIndexWriter extends LuceneTestCase {
     // Adding just one document does not call flush yet.
     int computedExtraFileCount = 0;
     for (String file : dir.listAll()) {
-      if (IndexWriter.WRITE_LOCK_NAME.equals(file) || 
-          file.startsWith(IndexFileNames.SEGMENTS) || 
+      if (IndexWriter.WRITE_LOCK_NAME.equals(file) ||
+          file.startsWith(IndexFileNames.SEGMENTS) ||
           IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches()) {
         if (file.lastIndexOf('.') < 0
             // don't count stored fields and term vectors in, or any temporary files they might
@@ -1458,7 +1458,7 @@ public class TestIndexWriter extends LuceneTestCase {
     FieldType customType3 = new FieldType(TextField.TYPE_STORED);
     customType3.setTokenized(false);
     customType3.setOmitNorms(true);
-    
+
     for (int i=0; i<2; i++) {
       Document doc = new Document();
       doc.add(new Field("id", Integer.toString(i)+BIG, customType3));
@@ -1478,7 +1478,7 @@ public class TestIndexWriter extends LuceneTestCase {
       SegmentReader sr = (SegmentReader) ctx.reader();
       assertFalse(sr.getFieldInfos().hasVectors());
     }
-    
+
     r0.close();
     dir.close();
   }
@@ -1501,7 +1501,7 @@ public class TestIndexWriter extends LuceneTestCase {
 
     @Override
     public final boolean incrementToken() {
-      clearAttributes();      
+      clearAttributes();
       if (upto < tokens.length) {
         termAtt.setEmpty();
         termAtt.append(tokens[upto]);
@@ -1724,7 +1724,7 @@ public class TestIndexWriter extends LuceneTestCase {
     r.close();
     dir.close();
   }
-  
+
   public void testDontInvokeAnalyzerForUnAnalyzedFields() throws Exception {
     Analyzer analyzer = new Analyzer() {
       @Override
@@ -1759,13 +1759,13 @@ public class TestIndexWriter extends LuceneTestCase {
     w.close();
     dir.close();
   }
-  
+
   //LUCENE-1468 -- make sure opening an IndexWriter with
   // create=true does not remove non-index files
-  
+
   public void testOtherFiles() throws Throwable {
     Directory dir = newDirectory();
-    IndexWriter iw = new IndexWriter(dir, 
+    IndexWriter iw = new IndexWriter(dir,
         newIndexWriterConfig(new MockAnalyzer(random())));
     iw.addDocument(new Document());
     iw.close();
@@ -1774,15 +1774,15 @@ public class TestIndexWriter extends LuceneTestCase {
       IndexOutput out = dir.createOutput("myrandomfile", newIOContext(random()));
       out.writeByte((byte) 42);
       out.close();
-      
+
       new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))).close();
-      
+
       assertTrue(slowFileExists(dir, "myrandomfile"));
     } finally {
       dir.close();
     }
   }
-  
+
   // LUCENE-3849
   public void testStopwordsPosIncHole() throws Exception {
     Directory dir = newDirectory();
@@ -1811,7 +1811,7 @@ public class TestIndexWriter extends LuceneTestCase {
     ir.close();
     dir.close();
   }
-  
+
   // LUCENE-3849
   public void testStopwordsPosIncHole2() throws Exception {
     // use two stopfilters for testing here
@@ -1843,23 +1843,23 @@ public class TestIndexWriter extends LuceneTestCase {
     ir.close();
     dir.close();
   }
-  
+
   // LUCENE-4575
   public void testCommitWithUserDataOnly() throws Exception {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(null));
     writer.commit(); // first commit to complete IW create transaction.
-    
+
     // this should store the commit data, even though no other changes were made
     writer.setLiveCommitData(new HashMap<String,String>() {{
       put("key", "value");
     }}.entrySet());
     writer.commit();
-    
+
     DirectoryReader r = DirectoryReader.open(dir);
     assertEquals("value", r.getIndexCommit().getUserData().get("key"));
     r.close();
-    
+
     // now check setCommitData and prepareCommit/commit sequence
     writer.setLiveCommitData(new HashMap<String,String>() {{
       put("key", "value1");
@@ -1873,7 +1873,7 @@ public class TestIndexWriter extends LuceneTestCase {
     r = DirectoryReader.open(dir);
     assertEquals("value1", r.getIndexCommit().getUserData().get("key"));
     r.close();
-    
+
     // now should commit the second commitData - there was a bug where 
     // IndexWriter.finishCommit overrode the second commitData
     writer.commit();
@@ -1881,7 +1881,7 @@ public class TestIndexWriter extends LuceneTestCase {
     assertEquals("IndexWriter.finishCommit may have overridden the second commitData",
         "value2", r.getIndexCommit().getUserData().get("key"));
     r.close();
-    
+
     writer.close();
     dir.close();
   }
@@ -1896,7 +1896,7 @@ public class TestIndexWriter extends LuceneTestCase {
     }
     return data;
   }
-  
+
   @Test
   public void testGetCommitData() throws Exception {
     Directory dir = newDirectory();
@@ -1906,16 +1906,16 @@ public class TestIndexWriter extends LuceneTestCase {
     }}.entrySet());
     assertEquals("value", getLiveCommitData(writer).get("key"));
     writer.close();
-    
+
     // validate that it's also visible when opening a new IndexWriter
     writer = new IndexWriter(dir, newIndexWriterConfig(null)
                                     .setOpenMode(OpenMode.APPEND));
     assertEquals("value", getLiveCommitData(writer).get("key"));
     writer.close();
-    
+
     dir.close();
   }
-  
+
   public void testNullAnalyzer() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig iwConf = newIndexWriterConfig(null);
@@ -1942,7 +1942,7 @@ public class TestIndexWriter extends LuceneTestCase {
     iw.close();
     dir.close();
   }
-  
+
   public void testNullDocument() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
@@ -1967,7 +1967,7 @@ public class TestIndexWriter extends LuceneTestCase {
     iw.close();
     dir.close();
   }
-  
+
   public void testNullDocuments() throws IOException {
     Directory dir = newDirectory();
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
@@ -1992,7 +1992,7 @@ public class TestIndexWriter extends LuceneTestCase {
     iw.close();
     dir.close();
   }
-  
+
   public void testIterableFieldThrowsException() throws IOException {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
@@ -2000,7 +2000,7 @@ public class TestIndexWriter extends LuceneTestCase {
     int docCount = 0;
     int docId = 0;
     Set<String> liveIds = new HashSet<>();
-    for (int i = 0; i < iters; i++) {      
+    for (int i = 0; i < iters; i++) {
       int numDocs = atLeast(4);
       for (int j = 0; j < numDocs; j++) {
         String id = Integer.toString(docId++);
@@ -2008,7 +2008,7 @@ public class TestIndexWriter extends LuceneTestCase {
         fields.add(new StringField("id", id, Field.Store.YES));
         fields.add(new StringField("foo", TestUtil.randomSimpleString(random()), Field.Store.NO));
         docId++;
-        
+
         boolean success = false;
         try {
           w.addDocument(new RandomFailingIterable<IndexableField>(fields, random()));
@@ -2040,7 +2040,7 @@ public class TestIndexWriter extends LuceneTestCase {
     w.close();
     IOUtils.close(reader, dir);
   }
-  
+
   public void testIterableThrowsException() throws IOException {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
@@ -2088,7 +2088,7 @@ public class TestIndexWriter extends LuceneTestCase {
     w.close();
     IOUtils.close(reader, dir);
   }
-  
+
   public void testIterableThrowsException2() throws IOException {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
@@ -2128,7 +2128,7 @@ public class TestIndexWriter extends LuceneTestCase {
       this.list = list;
       this.failOn = random.nextInt(5);
     }
-    
+
     @Override
     public Iterator<T> iterator() {
       final Iterator<? extends T> docIter = list.iterator();
@@ -2254,7 +2254,7 @@ public class TestIndexWriter extends LuceneTestCase {
     writer.close();
     dir.close();
   }
-  
+
   public void testMergeAllDeleted() throws IOException {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
@@ -2477,12 +2477,12 @@ public class TestIndexWriter extends LuceneTestCase {
     IndexWriter w = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())));
     w.addDocument(new Document());
     w.close();
-    
+
     SegmentInfos sis = SegmentInfos.readLatestCommit(d);
     byte[] id1 = sis.getId();
     assertNotNull(id1);
     assertEquals(StringHelper.ID_LENGTH, id1.length);
-    
+
     byte[] id2 = sis.info(0).info.getId();
     byte[] sciId2 = sis.info(0).getId();
     assertNotNull(id2);
@@ -2514,7 +2514,7 @@ public class TestIndexWriter extends LuceneTestCase {
       ids.add(id);
     }
   }
-  
+
   public void testEmptyNorm() throws Exception {
     Directory d = newDirectory();
     IndexWriter w = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())));
@@ -2579,7 +2579,7 @@ public class TestIndexWriter extends LuceneTestCase {
     assertEquals(1, r2.getIndexCommit().getGeneration());
     assertEquals("segments_1", r2.getIndexCommit().getSegmentsFileName());
     r2.close();
-    
+
     // make a change and another commit
     w.addDocument(new Document());
     w.commit();
@@ -2866,7 +2866,7 @@ public class TestIndexWriter extends LuceneTestCase {
     IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter w = new IndexWriter(dir, iwc);
     w.close();
-    
+
     IndexOutput out = dir.createTempOutput("_0", "bkd", IOContext.DEFAULT);
     String tempName = out.getName();
     out.close();
@@ -3151,7 +3151,7 @@ public class TestIndexWriter extends LuceneTestCase {
     expectThrows(IllegalArgumentException.class, () -> {
       writer.softUpdateDocument(null, new Document(), new NumericDocValuesField("soft_delete", 1));
     });
-    
+
     expectThrows(IllegalArgumentException.class, () -> {
       writer.softUpdateDocument(new Term("id", "1"), new Document());
     });
@@ -4167,4 +4167,47 @@ public class TestIndexWriter extends LuceneTestCase {
       }
     }
   }
+
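+  // A FilterMergePolicy that keeps fully deleted segments and asks for them to be
+  // merged on full flush; the single live document must survive the commits below.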
+  public void testMergeOnCommitKeepFullyDeletedSegments() throws Exception {
+    Directory dir = newDirectory();
+    IndexWriterConfig iwc = newIndexWriterConfig();
+    iwc.mergePolicy = new FilterMergePolicy(newMergePolicy()) {
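+      // Retain segments even after all of their documents have been deleted.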
+      @Override
+      public boolean keepFullyDeletedSegment(IOSupplier<CodecReader> readerIOSupplier) {
+        return true;
+      }
+
+      @Override
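+      // On full flush (e.g. commit), propose a single merge over all fully deleted segments.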
+      public MergeSpecification findFullFlushMerges(MergeTrigger mergeTrigger,
+                                                    SegmentInfos segmentInfos,
+                                                    MergeContext mergeContext) {
+        List<SegmentCommitInfo> fullyDeletedSegments = segmentInfos.asList().stream()
+                .filter(s -> s.info.maxDoc() - s.getDelCount() == 0)
+                .collect(Collectors.toList());
+        if (fullyDeletedSegments.isEmpty()) {
+          return null;
+        }
+        MergeSpecification spec = new MergeSpecification();
+        spec.add(new OneMerge(fullyDeletedSegments));
+        return spec;
+      }
+    };
+    IndexWriter w = new IndexWriter(dir, iwc);
+    Document d = new Document();
+    d.add(new StringField("id", "1", Field.Store.YES));
+    w.addDocument(d);
+    w.commit();
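+    // Updating the only document fully deletes the first segment, which the policy above retains.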
+    w.updateDocument(new Term("id", "1"), d);
+    w.commit();
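+    // The updated document must remain the single live document visible to the reader.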
+    try (DirectoryReader reader = w.getReader()) {
+      assertEquals(1, reader.numDocs());
+    }
+    IOUtils.close(w, dir);
+  }
 }