Posted to commits@lucene.apache.org by to...@apache.org on 2022/07/02 00:33:06 UTC

[lucene-jira-archive] 03/03: add attachments (partial)

This is an automated email from the ASF dual-hosted git repository.

tomoko pushed a commit to branch attachments
in repository https://gitbox.apache.org/repos/asf/lucene-jira-archive.git

commit 84ec57310e6d2fce9bb76a4489fc0a7d828e1c1a
Author: Tomoko Uchida <to...@gmail.com>
AuthorDate: Sat Jul 2 09:32:50 2022 +0900

    add attachments (partial)
---
 attachments/LUCENE-10515/FloatDocValuesStored.png  | Bin 0 -> 40238 bytes
 attachments/LUCENE-10515/FloatFieldStored.png      | Bin 0 -> 54921 bytes
 .../LUCENE-10515/Lucene50DocValuesProducer.png     | Bin 0 -> 401862 bytes
 attachments/LUCENE-10515/LuenceSortFloatIssue.zip  | Bin 0 -> 16432 bytes
 attachments/LUCENE-10515/screenshot-1.png          | Bin 0 -> 278629 bytes
 .../LUCENE-10519/image-2022-04-27-16-40-34-796.png | Bin 0 -> 227408 bytes
 .../LUCENE-10519/image-2022-04-27-16-40-58-056.png | Bin 0 -> 227408 bytes
 .../LUCENE-10519/image-2022-04-27-16-41-55-264.png | Bin 0 -> 164677 bytes
 .../LUCENE-10520/HTMLStripCharFilterTest.java      |  26 ++
 .../Screen Shot 2022-05-18 at 4.26.14 PM.png       | Bin 0 -> 396692 bytes
 .../Screen Shot 2022-05-18 at 4.26.24 PM.png       | Bin 0 -> 110277 bytes
 .../Screen Shot 2022-05-18 at 4.27.37 PM.png       | Bin 0 -> 114010 bytes
 .../LUCENE-10527/image-2022-04-20-14-53-58-484.png | Bin 0 -> 29129 bytes
 attachments/LUCENE-10534/flamegraph.png            | Bin 0 -> 508725 bytes
 .../LUCENE-10534/flamegraph_getValueForDoc.png     | Bin 0 -> 547901 bytes
 .../LUCENE-10542/flamegraph_getValueForDoc.png     | Bin 0 -> 547901 bytes
 .../Screen_Shot_2022-04-30_at_01.15.00.png         | Bin 0 -> 37557 bytes
 attachments/LUCENE-10551/LUCENE-10551-test.patch   |  30 ++
 attachments/LUCENE-10560/profile.png               | Bin 0 -> 67963 bytes
 attachments/LUCENE-10563/polygon-1.json            | 321 +++++++++++++
 attachments/LUCENE-10563/polygon-2.json            | 171 +++++++
 attachments/LUCENE-10563/polygon-3.json            | 169 +++++++
 attachments/LUCENE-10568/Image 007.png             | Bin 0 -> 103181 bytes
 attachments/LUCENE-10570/LUCENE-10570.patch        | 181 ++++++++
 attachments/LUCENE-10571/LUCENE-10571.patch        | 498 +++++++++++++++++++++
 .../Screen Shot 2022-05-16 at 10.28.22 AM.png      | Bin 0 -> 26338 bytes
 attachments/LUCENE-10579/backwards.log.gz          | Bin 0 -> 1532855 bytes
 27 files changed, 1396 insertions(+)

diff --git a/attachments/LUCENE-10515/FloatDocValuesStored.png b/attachments/LUCENE-10515/FloatDocValuesStored.png
new file mode 100644
index 00000000..7b205cce
Binary files /dev/null and b/attachments/LUCENE-10515/FloatDocValuesStored.png differ
diff --git a/attachments/LUCENE-10515/FloatFieldStored.png b/attachments/LUCENE-10515/FloatFieldStored.png
new file mode 100644
index 00000000..2c3b64c6
Binary files /dev/null and b/attachments/LUCENE-10515/FloatFieldStored.png differ
diff --git a/attachments/LUCENE-10515/Lucene50DocValuesProducer.png b/attachments/LUCENE-10515/Lucene50DocValuesProducer.png
new file mode 100644
index 00000000..de1d0057
Binary files /dev/null and b/attachments/LUCENE-10515/Lucene50DocValuesProducer.png differ
diff --git a/attachments/LUCENE-10515/LuenceSortFloatIssue.zip b/attachments/LUCENE-10515/LuenceSortFloatIssue.zip
new file mode 100644
index 00000000..d3526bff
Binary files /dev/null and b/attachments/LUCENE-10515/LuenceSortFloatIssue.zip differ
diff --git a/attachments/LUCENE-10515/screenshot-1.png b/attachments/LUCENE-10515/screenshot-1.png
new file mode 100644
index 00000000..b8c90a66
Binary files /dev/null and b/attachments/LUCENE-10515/screenshot-1.png differ
diff --git a/attachments/LUCENE-10519/image-2022-04-27-16-40-34-796.png b/attachments/LUCENE-10519/image-2022-04-27-16-40-34-796.png
new file mode 100644
index 00000000..0089669c
Binary files /dev/null and b/attachments/LUCENE-10519/image-2022-04-27-16-40-34-796.png differ
diff --git a/attachments/LUCENE-10519/image-2022-04-27-16-40-58-056.png b/attachments/LUCENE-10519/image-2022-04-27-16-40-58-056.png
new file mode 100644
index 00000000..0089669c
Binary files /dev/null and b/attachments/LUCENE-10519/image-2022-04-27-16-40-58-056.png differ
diff --git a/attachments/LUCENE-10519/image-2022-04-27-16-41-55-264.png b/attachments/LUCENE-10519/image-2022-04-27-16-41-55-264.png
new file mode 100644
index 00000000..0a9547bc
Binary files /dev/null and b/attachments/LUCENE-10519/image-2022-04-27-16-41-55-264.png differ
diff --git a/attachments/LUCENE-10520/HTMLStripCharFilterTest.java b/attachments/LUCENE-10520/HTMLStripCharFilterTest.java
new file mode 100644
index 00000000..f7445a68
--- /dev/null
+++ b/attachments/LUCENE-10520/HTMLStripCharFilterTest.java
@@ -0,0 +1,26 @@
+import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.StringWriter;
+
+import static org.junit.Assert.assertEquals;
+
+public class HTMLStripCharFilterTest {
+
+	private static final String HTML_TEXT = "<!DOCTYPE html><html lang=\"en\"><head><title>Test</title></head><body><p class=\"foo>bar\" id=\"baz\">Some text.</p></body></html>";
+
+	@Test
+	public void test()
+			  throws IOException
+	{
+		Reader reader = new StringReader(HTML_TEXT);
+		HTMLStripCharFilter filter = new HTMLStripCharFilter(reader);
+		StringWriter result = new StringWriter();
+		filter.transferTo(result);
+		assertEquals("Test\n\n\n\nSome text.", result.toString().trim());
+	}
+
+}
\ No newline at end of file
diff --git a/attachments/LUCENE-10527/Screen Shot 2022-05-18 at 4.26.14 PM.png b/attachments/LUCENE-10527/Screen Shot 2022-05-18 at 4.26.14 PM.png
new file mode 100644
index 00000000..70695967
Binary files /dev/null and b/attachments/LUCENE-10527/Screen Shot 2022-05-18 at 4.26.14 PM.png differ
diff --git a/attachments/LUCENE-10527/Screen Shot 2022-05-18 at 4.26.24 PM.png b/attachments/LUCENE-10527/Screen Shot 2022-05-18 at 4.26.24 PM.png
new file mode 100644
index 00000000..436723d7
Binary files /dev/null and b/attachments/LUCENE-10527/Screen Shot 2022-05-18 at 4.26.24 PM.png differ
diff --git a/attachments/LUCENE-10527/Screen Shot 2022-05-18 at 4.27.37 PM.png b/attachments/LUCENE-10527/Screen Shot 2022-05-18 at 4.27.37 PM.png
new file mode 100644
index 00000000..60531c7e
Binary files /dev/null and b/attachments/LUCENE-10527/Screen Shot 2022-05-18 at 4.27.37 PM.png differ
diff --git a/attachments/LUCENE-10527/image-2022-04-20-14-53-58-484.png b/attachments/LUCENE-10527/image-2022-04-20-14-53-58-484.png
new file mode 100644
index 00000000..65ddaecb
Binary files /dev/null and b/attachments/LUCENE-10527/image-2022-04-20-14-53-58-484.png differ
diff --git a/attachments/LUCENE-10534/flamegraph.png b/attachments/LUCENE-10534/flamegraph.png
new file mode 100644
index 00000000..41f84a2b
Binary files /dev/null and b/attachments/LUCENE-10534/flamegraph.png differ
diff --git a/attachments/LUCENE-10534/flamegraph_getValueForDoc.png b/attachments/LUCENE-10534/flamegraph_getValueForDoc.png
new file mode 100644
index 00000000..b8f4d45e
Binary files /dev/null and b/attachments/LUCENE-10534/flamegraph_getValueForDoc.png differ
diff --git a/attachments/LUCENE-10542/flamegraph_getValueForDoc.png b/attachments/LUCENE-10542/flamegraph_getValueForDoc.png
new file mode 100644
index 00000000..b8f4d45e
Binary files /dev/null and b/attachments/LUCENE-10542/flamegraph_getValueForDoc.png differ
diff --git a/attachments/LUCENE-10543/Screen_Shot_2022-04-30_at_01.15.00.png b/attachments/LUCENE-10543/Screen_Shot_2022-04-30_at_01.15.00.png
new file mode 100644
index 00000000..cf700689
Binary files /dev/null and b/attachments/LUCENE-10543/Screen_Shot_2022-04-30_at_01.15.00.png differ
diff --git a/attachments/LUCENE-10551/LUCENE-10551-test.patch b/attachments/LUCENE-10551/LUCENE-10551-test.patch
new file mode 100644
index 00000000..526f55b9
--- /dev/null
+++ b/attachments/LUCENE-10551/LUCENE-10551-test.patch
@@ -0,0 +1,30 @@
+diff --git a/lucene/core/src/test/org/apache/lucene/util/compress/TestLowercaseAsciiCompression.java b/lucene/core/src/test/org/apache/lucene/util/compress/TestLowercaseAsciiCompression.java
+index fd8374575e8..16012c00e15 100644
+--- a/lucene/core/src/test/org/apache/lucene/util/compress/TestLowercaseAsciiCompression.java
++++ b/lucene/core/src/test/org/apache/lucene/util/compress/TestLowercaseAsciiCompression.java
+@@ -19,8 +19,9 @@ package org.apache.lucene.util.compress;
+ import java.io.IOException;
+ import java.util.stream.Collectors;
+ import java.util.stream.IntStream;
++
+ import org.apache.lucene.store.ByteBuffersDataOutput;
+-import org.apache.lucene.tests.util.LuceneTestCase;
++ import org.apache.lucene.tests.util.LuceneTestCase;
+ import org.apache.lucene.tests.util.TestUtil;
+ import org.apache.lucene.util.ArrayUtil;
+ 
+@@ -57,6 +58,14 @@ public class TestLowercaseAsciiCompression extends LuceneTestCase {
+     assertTrue(doTestCompress("ab1.dEfg427hiogchio:'nwm un!94twxz".getBytes("UTF-8")));
+   }
+ 
++  public void testNotReallySimple() throws Exception {
++    doTestCompress("cion1cion_desarrollociones_oraclecionesnaturacionesnatura2tedppsa-integrationdemotiontion cloud gen2tion instance - dev1tion instance - testtion-devbtion-instancetion-prdtion-promerication-qation064533tion535217tion697401tion761348tion892818tion_matrationcauto_simmonsintgic_testtioncloudprodictioncloudservicetiongateway10tioninstance-jtsundatamartprd??o".getBytes("UTF-8"));
++  }
++
++  public void testNotReallySimple2() throws Exception {
++    doTestCompress("analytics-platform-test/koala/cluster-tool:1.0-20220310151438.492,mesh_istio_examples-bookinfo-details-v1:1.16.2mesh_istio_examples-bookinfo-reviews-v3:1.16.2oce-clamav:1.0.219oce-tesseract:1.0.7oce-traefik:2.5.1oci-opensearch:1.2.4.8.103oda-digital-assistant-control-plane-train-pool-workflow-v6:22.02.14oke-coresvcs-k8s-dns-dnsmasq-nanny-amd64@sha256:41aa9160ceeaf712369ddb660d02e5ec06d1679965e6930351967c8cf5ed62d4oke-coresvcs-k8s-dns-kube-dns-amd64@sha256:2cf34b04106 [...]
++  }
++
+   public void testFarAwayExceptions() throws Exception {
+     String s =
+         "01W" + IntStream.range(0, 300).mapToObj(i -> "a").collect(Collectors.joining()) + "W.";
diff --git a/attachments/LUCENE-10560/profile.png b/attachments/LUCENE-10560/profile.png
new file mode 100644
index 00000000..34364fa8
Binary files /dev/null and b/attachments/LUCENE-10560/profile.png differ
diff --git a/attachments/LUCENE-10563/polygon-1.json b/attachments/LUCENE-10563/polygon-1.json
new file mode 100644
index 00000000..8816fb4d
--- /dev/null
+++ b/attachments/LUCENE-10563/polygon-1.json
@@ -0,0 +1,321 @@
+{
+  "type": "Polygon",
+  "coordinates": [
+    [
+      [
+        -8.04103902110225,
+        -2.5255689071311807
+      ],
+      [
+        -8.04103902110225,
+        -2.5258466849089523
+      ],
+      [
+        -8.0415945766578,
+        -2.5258466849089523
+      ],
+      [
+        -8.0415945766578,
+        -2.526124462686731
+      ],
+      [
+        -8.041872354435583,
+        -2.526124462686731
+      ],
+      [
+        -8.041872354435583,
+        -2.5272355737978422
+      ],
+      [
+        -8.041316798880022,
+        -2.5272355737978422
+      ],
+      [
+        -8.041316798880022,
+        -2.527513351575621
+      ],
+      [
+        -8.04103902110225,
+        -2.527513351575621
+      ],
+      [
+        -8.04103902110225,
+        -2.5272355737978422
+      ],
+      [
+        -8.040761243324472,
+        -2.5272355737978422
+      ],
+      [
+        -8.040761243324472,
+        -2.527513351575621
+      ],
+      [
+        -8.040483465546693,
+        -2.527513351575621
+      ],
+      [
+        -8.040483465546693,
+        -2.5277911293533997
+      ],
+      [
+        -8.04020568776891,
+        -2.5277911293533997
+      ],
+      [
+        -8.04020568776891,
+        -2.5280689071311713
+      ],
+      [
+        -8.039927909991132,
+        -2.5280689071311713
+      ],
+      [
+        -8.039927909991132,
+        -2.5277911293533997
+      ],
+      [
+        -8.03965013221336,
+        -2.5277911293533997
+      ],
+      [
+        -8.03965013221336,
+        -2.5272355737978422
+      ],
+      [
+        -8.039372354435582,
+        -2.5272355737978422
+      ],
+      [
+        -8.039372354435582,
+        -2.526680018242292
+      ],
+      [
+        -8.03965013221336,
+        -2.526680018242292
+      ],
+      [
+        -8.03965013221336,
+        -2.526124462686731
+      ],
+      [
+        -8.039372354435582,
+        -2.526124462686731
+      ],
+      [
+        -8.039372354435582,
+        -2.5255689071311807
+      ],
+      [
+        -8.039094576657803,
+        -2.5255689071311807
+      ],
+      [
+        -8.039094576657803,
+        -2.525291129353402
+      ],
+      [
+        -8.03881679888002,
+        -2.525291129353402
+      ],
+      [
+        -8.03881679888002,
+        -2.5250133515756197
+      ],
+      [
+        -8.03742790999113,
+        -2.5250133515756197
+      ],
+      [
+        -8.03742790999113,
+        -2.5241800182422907
+      ],
+      [
+        -8.037705687768913,
+        -2.5241800182422907
+      ],
+      [
+        -8.037705687768913,
+        -2.523902240464512
+      ],
+      [
+        -8.037983465546692,
+        -2.523902240464512
+      ],
+      [
+        -8.037983465546692,
+        -2.5241800182422907
+      ],
+      [
+        -8.039927909991132,
+        -2.5241800182422907
+      ],
+      [
+        -8.039927909991132,
+        -2.523902240464512
+      ],
+      [
+        -8.040483465546693,
+        -2.523902240464512
+      ],
+      [
+        -8.040483465546693,
+        -2.5244577960200694
+      ],
+      [
+        -8.040761243324472,
+        -2.5244577960200694
+      ],
+      [
+        -8.040761243324472,
+        -2.524735573797841
+      ],
+      [
+        -8.04103902110225,
+        -2.524735573797841
+      ],
+      [
+        -8.04103902110225,
+        -2.525291129353402
+      ],
+      [
+        -8.041316798880022,
+        -2.525291129353402
+      ],
+      [
+        -8.041316798880022,
+        -2.5255689071311807
+      ],
+      [
+        -8.04103902110225,
+        -2.5255689071311807
+      ]
+    ],
+    [
+      [
+        -8.03965013221336,
+        -2.5258466849089523
+      ],
+      [
+        -8.03965013221336,
+        -2.526124462686731
+      ],
+      [
+        -8.039927909991132,
+        -2.526124462686731
+      ],
+      [
+        -8.039927909991132,
+        -2.5258466849089523
+      ],
+      [
+        -8.03965013221336,
+        -2.5258466849089523
+      ]
+    ],
+    [
+      [
+        -8.039927909991132,
+        -2.5250133515756197
+      ],
+      [
+        -8.039927909991132,
+        -2.5258466849089523
+      ],
+      [
+        -8.04020568776891,
+        -2.5258466849089523
+      ],
+      [
+        -8.04020568776891,
+        -2.525291129353402
+      ],
+      [
+        -8.040761243324472,
+        -2.525291129353402
+      ],
+      [
+        -8.040761243324472,
+        -2.5250133515756197
+      ],
+      [
+        -8.039927909991132,
+        -2.5250133515756197
+      ]
+    ],
+    [
+      [
+        -8.04020568776891,
+        -2.526124462686731
+      ],
+      [
+        -8.039927909991132,
+        -2.526124462686731
+      ],
+      [
+        -8.039927909991132,
+        -2.52695779602006
+      ],
+      [
+        -8.040761243324472,
+        -2.52695779602006
+      ],
+      [
+        -8.040761243324472,
+        -2.526680018242292
+      ],
+      [
+        -8.04020568776891,
+        -2.526680018242292
+      ],
+      [
+        -8.04020568776891,
+        -2.526124462686731
+      ]
+    ],
+    [
+      [
+        -8.04103902110225,
+        -2.526124462686731
+      ],
+      [
+        -8.04103902110225,
+        -2.5258466849089523
+      ],
+      [
+        -8.040483465546693,
+        -2.5258466849089523
+      ],
+      [
+        -8.040483465546693,
+        -2.526124462686731
+      ],
+      [
+        -8.04103902110225,
+        -2.526124462686731
+      ]
+    ],
+    [
+      [
+        -8.03965013221336,
+        -2.525291129353402
+      ],
+      [
+        -8.03965013221336,
+        -2.524735573797841
+      ],
+      [
+        -8.039372354435582,
+        -2.524735573797841
+      ],
+      [
+        -8.039372354435582,
+        -2.525291129353402
+      ],
+      [
+        -8.03965013221336,
+        -2.525291129353402
+      ]
+    ]
+  ]
+}
diff --git a/attachments/LUCENE-10563/polygon-2.json b/attachments/LUCENE-10563/polygon-2.json
new file mode 100644
index 00000000..f02ea78d
--- /dev/null
+++ b/attachments/LUCENE-10563/polygon-2.json
@@ -0,0 +1,171 @@
+{
+  "type": "Polygon",
+  "coordinates": [
+    [
+      [
+        177.96645568838056,
+        2.84595887064717
+      ],
+      [
+        177.96645568838056,
+        2.8456810928693894
+      ],
+      [
+        177.96590013282497,
+        2.8456810928693894
+      ],
+      [
+        177.96590013282497,
+        2.845403315091609
+      ],
+      [
+        177.96562235504717,
+        2.845403315091609
+      ],
+      [
+        177.96562235504717,
+        2.8448477595360604
+      ],
+      [
+        177.96534457726938,
+        2.8448477595360604
+      ],
+      [
+        177.96534457726938,
+        2.8440144262027296
+      ],
+      [
+        177.96562235504717,
+        2.8440144262027296
+      ],
+      [
+        177.96562235504717,
+        2.8434588706471704
+      ],
+      [
+        177.96590013282497,
+        2.8434588706471704
+      ],
+      [
+        177.96590013282497,
+        2.84318109286939
+      ],
+      [
+        177.96673346615827,
+        2.84318109286939
+      ],
+      [
+        177.96673346615827,
+        2.8434588706471704
+      ],
+      [
+        177.96701124393607,
+        2.8434588706471704
+      ],
+      [
+        177.96701124393607,
+        2.84456998175828
+      ],
+      [
+        177.96728902171387,
+        2.84456998175828
+      ],
+      [
+        177.96728902171387,
+        2.845125537313839
+      ],
+      [
+        177.96701124393607,
+        2.845125537313839
+      ],
+      [
+        177.96701124393607,
+        2.845403315091609
+      ],
+      [
+        177.96645568838056,
+        2.845403315091609
+      ],
+      [
+        177.96645568838056,
+        2.845125537313839
+      ],
+      [
+        177.96617791060277,
+        2.845125537313839
+      ],
+      [
+        177.96617791060277,
+        2.8448477595360604
+      ],
+      [
+        177.96590013282497,
+        2.8448477595360604
+      ],
+      [
+        177.96590013282497,
+        2.8440144262027296
+      ],
+      [
+        177.96617791060277,
+        2.8440144262027296
+      ],
+      [
+        177.96617791060277,
+        2.843736648424949
+      ],
+      [
+        177.96673346615827,
+        2.843736648424949
+      ],
+      [
+        177.96673346615827,
+        2.8440144262027296
+      ],
+      [
+        177.96728902171387,
+        2.8440144262027296
+      ],
+      [
+        177.96728902171387,
+        2.8442922039804994
+      ],
+      [
+        177.96756679949166,
+        2.8442922039804994
+      ],
+      [
+        177.96756679949166,
+        2.845125537313839
+      ],
+      [
+        177.96784457726937,
+        2.845125537313839
+      ],
+      [
+        177.96784457726937,
+        2.845403315091609
+      ],
+      [
+        177.96756679949166,
+        2.845403315091609
+      ],
+      [
+        177.96756679949166,
+        2.8456810928693894
+      ],
+      [
+        177.96728902171387,
+        2.8456810928693894
+      ],
+      [
+        177.96728902171387,
+        2.84595887064717
+      ],
+      [
+        177.96645568838056,
+        2.84595887064717
+      ]
+    ]
+  ]
+}
diff --git a/attachments/LUCENE-10563/polygon-3.json b/attachments/LUCENE-10563/polygon-3.json
new file mode 100644
index 00000000..dd02e4bc
--- /dev/null
+++ b/attachments/LUCENE-10563/polygon-3.json
@@ -0,0 +1,169 @@
+{
+  "type": "Polygon",
+  "coordinates": [
+    [
+      [
+        126.94993320113078,
+        -11.22709668490965
+      ],
+      [
+        126.94993320113078,
+        -11.227374462687422
+      ],
+      [
+        126.94965542335298,
+        -11.227374462687422
+      ],
+      [
+        126.94965542335298,
+        -11.22793001824298
+      ],
+      [
+        126.94993320113078,
+        -11.22793001824298
+      ],
+      [
+        126.94993320113078,
+        -11.22848557379854
+      ],
+      [
+        126.94965542335298,
+        -11.22848557379854
+      ],
+      [
+        126.94965542335298,
+        -11.228763351576312
+      ],
+      [
+        126.94937764557518,
+        -11.228763351576312
+      ],
+      [
+        126.94937764557518,
+        -11.2276522404652
+      ],
+      [
+        126.94826653446408,
+        -11.2276522404652
+      ],
+      [
+        126.94826653446408,
+        -11.228763351576312
+      ],
+      [
+        126.94909986779739,
+        -11.228763351576312
+      ],
+      [
+        126.94909986779739,
+        -11.22793001824298
+      ],
+      [
+        126.94854431224188,
+        -11.22793001824298
+      ],
+      [
+        126.94854431224188,
+        -11.22904112935409
+      ],
+      [
+        126.94937764557518,
+        -11.22904112935409
+      ],
+      [
+        126.94937764557518,
+        -11.22931890713187
+      ],
+      [
+        126.95104431224188,
+        -11.22931890713187
+      ],
+      [
+        126.95104431224188,
+        -11.229596684909652
+      ],
+      [
+        126.95159986779738,
+        -11.229596684909652
+      ],
+      [
+        126.95159986779738,
+        -11.228763351576312
+      ],
+      [
+        126.95187764557518,
+        -11.228763351576312
+      ],
+      [
+        126.95187764557518,
+        -11.2276522404652
+      ],
+      [
+        126.95159986779738,
+        -11.2276522404652
+      ],
+      [
+        126.95159986779738,
+        -11.22848557379854
+      ],
+      [
+        126.95132209001967,
+        -11.22848557379854
+      ],
+      [
+        126.95132209001967,
+        -11.228763351576312
+      ],
+      [
+        126.95076653446408,
+        -11.228763351576312
+      ],
+      [
+        126.95076653446408,
+        -11.22793001824298
+      ],
+      [
+        126.95104431224188,
+        -11.22793001824298
+      ],
+      [
+        126.95104431224188,
+        -11.227374462687422
+      ],
+      [
+        126.95076653446408,
+        -11.227374462687422
+      ],
+      [
+        126.95076653446408,
+        -11.22709668490965
+      ],
+      [
+        126.94993320113078,
+        -11.22709668490965
+      ]
+    ],
+    [
+      [
+        126.95021097890857,
+        -11.2276522404652
+      ],
+      [
+        126.95048875668628,
+        -11.2276522404652
+      ],
+      [
+        126.95048875668628,
+        -11.22793001824298
+      ],
+      [
+        126.95021097890857,
+        -11.22793001824298
+      ],
+      [
+        126.95021097890857,
+        -11.2276522404652
+      ]
+    ]
+  ]
+}
diff --git a/attachments/LUCENE-10568/Image 007.png b/attachments/LUCENE-10568/Image 007.png
new file mode 100644
index 00000000..cc4e25d9
Binary files /dev/null and b/attachments/LUCENE-10568/Image 007.png differ
diff --git a/attachments/LUCENE-10570/LUCENE-10570.patch b/attachments/LUCENE-10570/LUCENE-10570.patch
new file mode 100644
index 00000000..188a227e
--- /dev/null
+++ b/attachments/LUCENE-10570/LUCENE-10570.patch
@@ -0,0 +1,181 @@
+diff --git a/lucene/monitor/src/java/org/apache/lucene/monitor/RejectUnconstrainedQueriesPresearcherWrapper.java b/lucene/monitor/src/java/org/apache/lucene/monitor/RejectUnconstrainedQueriesPresearcherWrapper.java
+new file mode 100644
+index 00000000000..cdacf61c2a1
+--- /dev/null
++++ b/lucene/monitor/src/java/org/apache/lucene/monitor/RejectUnconstrainedQueriesPresearcherWrapper.java
+@@ -0,0 +1,67 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.lucene.monitor;
++
++import java.util.Map;
++import java.util.function.BiPredicate;
++import org.apache.lucene.document.Document;
++import org.apache.lucene.index.LeafReader;
++import org.apache.lucene.search.Query;
++import org.apache.lucene.util.BytesRef;
++
++/**
++ * Presearcher implementation that wraps another Presearcher, providing an {@link #indexQuery} impl
++ * that will throw a {@link IllegalArgumentException} if the <code>ANYTOKEN_FIELD</code> is used.
++ *
++ * <p>This allows the creation of Monitor instances that will reject the registration of queries
++ * that require a "forward searched" test against all documents.
++ */
++public class RejectUnconstrainedQueriesPresearcherWrapper extends Presearcher {
++
++  static final String ANYTOKEN_FIELD = TermFilteredPresearcher.ANYTOKEN_FIELD;
++  private final String msg;
++  private final Presearcher inner;
++
++  /**
++   * Creates a new RejectUnconstrainedQueriesPresearcherWrapper
++   *
++   * @param inner presearcher to wrap.
++   * @param msg exception message to use.
++   */
++  public RejectUnconstrainedQueriesPresearcherWrapper(final Presearcher inner, final String msg) {
++    assert null != inner;
++    assert null != msg;
++
++    this.inner = inner;
++    this.msg = msg;
++  }
++
++  @Override
++  public Query buildQuery(LeafReader reader, BiPredicate<String, BytesRef> termAcceptor) {
++    return inner.buildQuery(reader, termAcceptor);
++  }
++
++  @Override
++  public Document indexQuery(Query query, Map<String, String> metadata) {
++    final Document doc = inner.indexQuery(query, metadata);
++    if (null != doc.getField(ANYTOKEN_FIELD)) {
++      throw new IllegalArgumentException(msg);
++    }
++    return doc;
++  }
++}
+diff --git a/lucene/monitor/src/test/org/apache/lucene/monitor/TestRejectUnconstrainedQueriesPresearcherWrapper.java b/lucene/monitor/src/test/org/apache/lucene/monitor/TestRejectUnconstrainedQueriesPresearcherWrapper.java
+new file mode 100644
+index 00000000000..bb944da7c5a
+--- /dev/null
++++ b/lucene/monitor/src/test/org/apache/lucene/monitor/TestRejectUnconstrainedQueriesPresearcherWrapper.java
+@@ -0,0 +1,102 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.lucene.monitor;
++
++import java.io.IOException;
++import java.util.Collections;
++import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
++
++public class TestRejectUnconstrainedQueriesPresearcherWrapper extends MonitorTestBase {
++
++  @Override
++  public Monitor newMonitor() throws IOException {
++    return new Monitor(new WhitespaceAnalyzer(), createPresearcher());
++  }
++
++  protected Presearcher createPresearcher() {
++    return new RejectUnconstrainedQueriesPresearcherWrapper(
++        new MultipassTermFilteredPresearcher(
++            4, 0, TermWeightor.DEFAULT, Collections.emptyList(), Collections.singleton("language")),
++        "bad dog, no biscut");
++  }
++
++  public void testReject() throws IOException {
++    Presearcher presearcher = createPresearcher();
++    IllegalArgumentException e =
++        expectThrows(
++            IllegalArgumentException.class,
++            () -> {
++              presearcher.indexQuery(parse("*:*"), Collections.emptyMap());
++            });
++    assertEquals("bad dog, no biscut", e.getMessage());
++  }
++
++  public void testEmptyMonitorHandling() throws IOException {
++    try (Monitor monitor = newMonitor()) {
++      MatchingQueries<QueryMatch> matches =
++          monitor.match(PresearcherTestBase.buildDoc("field_2", "test"), QueryMatch.SIMPLE_MATCHER);
++      assertEquals(0, matches.getMatchCount());
++      assertEquals(0, matches.getQueriesRun());
++    }
++  }
++
++  public void testSimple() throws IOException {
++
++    try (Monitor monitor = newMonitor()) {
++      monitor.register(new MonitorQuery("1", parse("field:xxxx")));
++      monitor.register(new MonitorQuery("2", parse("field:yyyy")));
++
++      MatchingQueries<QueryMatch> matches =
++          monitor.match(PresearcherTestBase.buildDoc("field", "xxxx"), QueryMatch.SIMPLE_MATCHER);
++      assertEquals(1, matches.getQueriesRun());
++      assertNotNull(matches.matches("1"));
++    }
++  }
++
++  public void testSimpleBoolean() throws IOException {
++
++    try (Monitor monitor = newMonitor()) {
++      monitor.register(
++          new MonitorQuery("1", parse("field:\"hello world\"")),
++          new MonitorQuery("2", parse("field:world")),
++          new MonitorQuery("3", parse("field:\"hello there world\"")),
++          new MonitorQuery("4", parse("field:\"this and that\"")));
++
++      MatchingQueries<QueryMatch> matches =
++          monitor.match(
++              PresearcherTestBase.buildDoc("field", "hello world and goodbye"),
++              QueryMatch.SIMPLE_MATCHER);
++      assertEquals(2, matches.getQueriesRun());
++      assertNotNull(matches.matches("1"));
++    }
++  }
++
++  public void testRejectRegisterMatchAllQueriesWithMetadataFilterFields() throws IOException {
++    try (Monitor monitor = newMonitor()) {
++      IllegalArgumentException e =
++          expectThrows(
++              IllegalArgumentException.class,
++              () -> {
++                monitor.register(
++                    new MonitorQuery(
++                        "1", parse("*:*"), null, Collections.singletonMap("language", "de")));
++              });
++      assertEquals("bad dog, no biscut", e.getMessage());
++    }
++  }
++}
diff --git a/attachments/LUCENE-10571/LUCENE-10571.patch b/attachments/LUCENE-10571/LUCENE-10571.patch
new file mode 100644
index 00000000..f72da213
--- /dev/null
+++ b/attachments/LUCENE-10571/LUCENE-10571.patch
@@ -0,0 +1,498 @@
+diff --git a/lucene/monitor/src/java/org/apache/lucene/monitor/HuperDuperTermFilteredPresearcher.java b/lucene/monitor/src/java/org/apache/lucene/monitor/HuperDuperTermFilteredPresearcher.java
+new file mode 100644
+index 00000000000..421f8208170
+--- /dev/null
++++ b/lucene/monitor/src/java/org/apache/lucene/monitor/HuperDuperTermFilteredPresearcher.java
+@@ -0,0 +1,308 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.lucene.monitor;
++
++import java.io.IOException;
++import java.util.ArrayList;
++import java.util.Collections;
++import java.util.HashMap;
++import java.util.List;
++import java.util.Map;
++import java.util.Set;
++import java.util.function.BiPredicate;
++import org.apache.lucene.analysis.FilteringTokenFilter;
++import org.apache.lucene.analysis.TokenStream;
++import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
++import org.apache.lucene.document.Document;
++import org.apache.lucene.document.Field;
++import org.apache.lucene.document.FieldType;
++import org.apache.lucene.document.StringField;
++import org.apache.lucene.document.TextField;
++import org.apache.lucene.index.FieldInfo;
++import org.apache.lucene.index.IndexOptions;
++import org.apache.lucene.index.LeafReader;
++import org.apache.lucene.index.Term;
++import org.apache.lucene.index.Terms;
++import org.apache.lucene.index.TermsEnum;
++import org.apache.lucene.search.BooleanClause;
++import org.apache.lucene.search.BooleanQuery;
++import org.apache.lucene.search.Query;
++import org.apache.lucene.search.TermInSetQuery;
++import org.apache.lucene.search.TermQuery;
++import org.apache.lucene.util.BytesRef;
++import org.apache.lucene.util.BytesRefHash;
++import org.apache.lucene.util.BytesRefIterator;
++
++/**
++ * Presearcher implementation that uses terms extracted from queries to index them in the Monitor,
++ * and builds a disjunction from terms in a document to match them.
++ *
++ * <p>Handling of queries that do not support term extraction through the {@link
++ * org.apache.lucene.search.QueryVisitor} API can be configured by passing a list of {@link
++ * CustomQueryHandler} implementations.
++ *
++ * <p>Filtering by additional fields can be configured by passing a set of field names. Documents
++ * that contain values in those fields will only be checked against {@link MonitorQuery} instances
++ * that have the same fieldname-value mapping in their metadata <em>or have no mapping for that
++ * fieldname</em>. Documents that do not contain values in those fields will only be checked against
++ * {@link MonitorQuery} instances that also have no mapping for that fieldname.
++ */
++public class HuperDuperTermFilteredPresearcher extends Presearcher {
++
++  /** The default TermWeightor, weighting by token length */
++  public static final TermWeightor DEFAULT_WEIGHTOR = TermWeightor.DEFAULT;
++
++  private final QueryAnalyzer extractor;
++  private final TermWeightor weightor;
++
++  private final Set<String> filterFields;
++  private final List<CustomQueryHandler> queryHandlers = new ArrayList<>();
++
++  static final String ANYTOKEN_FIELD = TermFilteredPresearcher.ANYTOKEN_FIELD;
++  static final String ANYTOKEN = TermFilteredPresearcher.ANYTOKEN;
++
++  static final String MISSING_FILTERS_FIELD = "__missing_filters_field_names";
++
++  /** Creates a new HuperDuperTermFilteredPresearcher using the default term weighting */
++  public HuperDuperTermFilteredPresearcher() {
++    this(DEFAULT_WEIGHTOR, Collections.emptyList(), Collections.emptySet());
++  }
++
++  /**
++   * Creates a new HuperDuperTermFilteredPresearcher
++   *
++   * @param weightor the TermWeightor
++   * @param customQueryHandlers A list of custom query handlers to extract terms from non-core
++   *     queries
++   * @param filterFields A set of fields to filter on
++   */
++  public HuperDuperTermFilteredPresearcher(
++      TermWeightor weightor,
++      List<CustomQueryHandler> customQueryHandlers,
++      Set<String> filterFields) {
++    this.extractor = new QueryAnalyzer(customQueryHandlers);
++    this.filterFields = filterFields;
++    this.queryHandlers.addAll(customQueryHandlers);
++    this.weightor = weightor;
++  }
++
++  @Override
++  public final Query buildQuery(LeafReader reader, BiPredicate<String, BytesRef> termAcceptor) {
++    try {
++      DocumentQueryBuilder queryBuilder = getQueryBuilder();
++      for (FieldInfo field : reader.getFieldInfos()) {
++
++        Terms terms = reader.terms(field.name);
++        if (terms == null) {
++          continue;
++        }
++
++        TokenStream ts = new TermsEnumTokenStream(terms.iterator());
++        for (CustomQueryHandler handler : queryHandlers) {
++          ts = handler.wrapTermStream(field.name, ts);
++        }
++
++        ts =
++            new FilteringTokenFilter(ts) {
++              TermToBytesRefAttribute termAtt = addAttribute(TermToBytesRefAttribute.class);
++
++              @Override
++              protected boolean accept() {
++                return filterFields.contains(field.name) == false
++                    && termAcceptor.test(field.name, termAtt.getBytesRef());
++              }
++            };
++
++        TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
++        while (ts.incrementToken()) {
++          queryBuilder.addTerm(field.name, BytesRef.deepCopyOf(termAtt.getBytesRef()));
++        }
++        ts.close();
++      }
++      Query presearcherQuery = queryBuilder.build();
++
++      BooleanQuery.Builder bq = new BooleanQuery.Builder();
++      bq.add(presearcherQuery, BooleanClause.Occur.SHOULD);
++      bq.add(new TermQuery(new Term(ANYTOKEN_FIELD, ANYTOKEN)), BooleanClause.Occur.SHOULD);
++      presearcherQuery = bq.build();
++      if (filterFields.isEmpty() == false) {
++        bq = new BooleanQuery.Builder();
++        bq.add(presearcherQuery, BooleanClause.Occur.MUST);
++        Query filterQuery = buildFilterFields(reader);
++        if (filterQuery != null) {
++          bq.add(filterQuery, BooleanClause.Occur.FILTER);
++          presearcherQuery = bq.build();
++        }
++      }
++      return presearcherQuery;
++    } catch (IOException e) {
++      // We're a MemoryIndex, so this shouldn't happen...
++      throw new RuntimeException(e);
++    }
++  }
++
++  private Query buildFilterFields(LeafReader reader) throws IOException {
++    BooleanQuery.Builder builder = new BooleanQuery.Builder();
++    for (String field : filterFields) {
++      Query q = buildFilterClause(reader, field);
++      assert null != q;
++      builder.add(q, BooleanClause.Occur.MUST);
++    }
++    BooleanQuery bq = builder.build();
++    if (bq.clauses().size() == 0) {
++      return null;
++    }
++    return bq;
++  }
++
++  private Query buildFilterClause(LeafReader reader, String field) throws IOException {
++
++    Query missingQ = new TermQuery(new Term(MISSING_FILTERS_FIELD, field));
++    final Terms terms = reader.terms(field);
++    if (terms == null) return missingQ;
++
++    BooleanQuery.Builder bq = new BooleanQuery.Builder();
++    bq.add(missingQ, BooleanClause.Occur.SHOULD);
++
++    int docsInBatch = reader.maxDoc();
++
++    BytesRef term;
++    TermsEnum te = terms.iterator();
++    while ((term = te.next()) != null) {
++      // we need to check that every document in the batch has the same field values, otherwise
++      // this filtering will not work
++      if (te.docFreq() != docsInBatch)
++        throw new IllegalArgumentException(
++            "Some documents in this batch do not have a term value of "
++                + field
++                + ":"
++                + Term.toString(term));
++      bq.add(new TermQuery(new Term(field, BytesRef.deepCopyOf(term))), BooleanClause.Occur.SHOULD);
++    }
++
++    BooleanQuery built = bq.build();
++    assert 0 < built.clauses().size();
++    if (built.clauses().size() <= 1) return missingQ;
++
++    return built;
++  }
++
++  /** Constructs a document disjunction from a set of terms */
++  protected interface DocumentQueryBuilder {
++
++    /** Add a term from this document */
++    void addTerm(String field, BytesRef term) throws IOException;
++
++    /** @return the final Query */
++    Query build();
++  }
++
++  /** Returns a {@link DocumentQueryBuilder} for this presearcher */
++  protected DocumentQueryBuilder getQueryBuilder() {
++    return new DocumentQueryBuilder() {
++
++      Map<String, List<BytesRef>> terms = new HashMap<>();
++
++      @Override
++      public void addTerm(String field, BytesRef term) {
++        List<BytesRef> t = terms.computeIfAbsent(field, f -> new ArrayList<>());
++        t.add(term);
++      }
++
++      @Override
++      public Query build() {
++        BooleanQuery.Builder builder = new BooleanQuery.Builder();
++        for (Map.Entry<String, List<BytesRef>> entry : terms.entrySet()) {
++          builder.add(
++              new TermInSetQuery(entry.getKey(), entry.getValue()), BooleanClause.Occur.SHOULD);
++        }
++        return builder.build();
++      }
++    };
++  }
++
++  static final FieldType QUERYFIELDTYPE;
++
++  static {
++    QUERYFIELDTYPE = new FieldType(TextField.TYPE_NOT_STORED);
++    QUERYFIELDTYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
++    QUERYFIELDTYPE.freeze();
++  }
++
++  @Override
++  public final Document indexQuery(Query query, Map<String, String> metadata) {
++    QueryTree querytree = extractor.buildTree(query, weightor);
++    Document doc = buildQueryDocument(querytree);
++    for (String field : filterFields) {
++      if (metadata != null && metadata.containsKey(field)) {
++        doc.add(new TextField(field, metadata.get(field), Field.Store.YES));
++      } else {
++        doc.add(new StringField(MISSING_FILTERS_FIELD, field, Field.Store.NO));
++      }
++    }
++    return doc;
++  }
++
++  /** Builds a {@link Document} from the terms extracted from a query */
++  protected Document buildQueryDocument(QueryTree querytree) {
++    Map<String, BytesRefHash> fieldTerms = collectTerms(querytree);
++    Document doc = new Document();
++    for (Map.Entry<String, BytesRefHash> entry : fieldTerms.entrySet()) {
++      doc.add(
++          new Field(
++              entry.getKey(),
++              new TermsEnumTokenStream(new BytesRefHashIterator(entry.getValue())),
++              QUERYFIELDTYPE));
++    }
++    return doc;
++  }
++
++  /** Collects terms from a {@link QueryTree} and maps them per-field */
++  protected Map<String, BytesRefHash> collectTerms(QueryTree querytree) {
++    Map<String, BytesRefHash> fieldTerms = new HashMap<>();
++    querytree.collectTerms(
++        (field, term) -> {
++          BytesRefHash tt = fieldTerms.computeIfAbsent(field, f -> new BytesRefHash());
++          tt.add(term);
++        });
++    return fieldTerms;
++  }
++
++  /** Implements a {@link BytesRefIterator} over a {@link BytesRefHash} */
++  protected class BytesRefHashIterator implements BytesRefIterator {
++
++    final BytesRef scratch = new BytesRef();
++    final BytesRefHash terms;
++    final int[] sortedTerms;
++    int upto = -1;
++
++    BytesRefHashIterator(BytesRefHash terms) {
++      this.terms = terms;
++      this.sortedTerms = terms.sort();
++    }
++
++    @Override
++    public BytesRef next() {
++      if (upto >= sortedTerms.length) return null;
++      upto++;
++      if (sortedTerms[upto] == -1) return null;
++      this.terms.get(sortedTerms[upto], scratch);
++      return scratch;
++    }
++  }
++}
+diff --git a/lucene/monitor/src/test/org/apache/lucene/monitor/FieldFilterPresearcherComponentTestBase.java b/lucene/monitor/src/test/org/apache/lucene/monitor/FieldFilterPresearcherComponentTestBase.java
+index db167f5c9af..aa62e080f52 100644
+--- a/lucene/monitor/src/test/org/apache/lucene/monitor/FieldFilterPresearcherComponentTestBase.java
++++ b/lucene/monitor/src/test/org/apache/lucene/monitor/FieldFilterPresearcherComponentTestBase.java
+@@ -114,6 +114,73 @@ public abstract class FieldFilterPresearcherComponentTestBase extends Presearche
+     }
+   }
+ 
++  public void testMissingFieldFiltering() throws IOException {
++
++    try (Monitor monitor = newMonitor()) {
++      monitor.register(
++          new MonitorQuery("1", parse("test"), null, Collections.singletonMap("language", "en")),
++          new MonitorQuery("2", parse("test"), null, Collections.singletonMap("language", "de")),
++          new MonitorQuery("3", parse("test"), null, Collections.singletonMap("language", "xxxxx")),
++          new MonitorQuery("4", parse("test"), null, Collections.emptyMap()),
++          new MonitorQuery("5", parse("*:*"), null, Collections.singletonMap("language", "en")),
++          new MonitorQuery("6", parse("zzzz"), null, Collections.emptyMap()),
++          // NOTE: these are guaranteed to produce a 'test' term making it a candidate query for our
++          // test docs,
++          // but are also guaranteed to never (forward) match any document
++          new MonitorQuery("7", parse("+test -test"), null, Collections.emptyMap()),
++          new MonitorQuery(
++              "8", parse("+test -test"), null, Collections.singletonMap("language", "en")));
++
++      {
++        Document enDoc = new Document();
++        enDoc.add(newTextField(TEXTFIELD, "this is a test", Field.Store.NO));
++        enDoc.add(newTextField("language", "en", Field.Store.NO));
++
++        MatchingQueries<QueryMatch> en = monitor.match(enDoc, QueryMatch.SIMPLE_MATCHER);
++        assertEquals(2, en.getMatchCount());
++        assertEquals(3, en.getQueriesRun());
++        assertNotNull(en.matches("1"));
++        assertNotNull(en.matches("5"));
++      }
++      {
++        Document deDoc = new Document();
++        deDoc.add(newTextField(TEXTFIELD, "das ist ein test", Field.Store.NO));
++        deDoc.add(newTextField("language", "de", Field.Store.NO));
++
++        MatchingQueries<QueryMatch> de = monitor.match(deDoc, QueryMatch.SIMPLE_MATCHER);
++        assertEquals(1, de.getMatchCount());
++        assertEquals(1, de.getQueriesRun());
++        assertNotNull(de.matches("2"));
++      }
++      {
++        Document bothDoc = new Document();
++        bothDoc.add(newTextField(TEXTFIELD, "this is ein test", Field.Store.NO));
++        bothDoc.add(newTextField("language", "en", Field.Store.NO));
++        bothDoc.add(newTextField("language", "de", Field.Store.NO));
++
++        MatchingQueries<QueryMatch> both = monitor.match(bothDoc, QueryMatch.SIMPLE_MATCHER);
++        assertEquals(3, both.getMatchCount());
++        assertEquals(4, both.getQueriesRun());
++        assertNotNull(both.matches("1"));
++        assertNotNull(both.matches("2"));
++        assertNotNull(both.matches("5"));
++      }
++      {
++        Document noneDoc = new Document();
++        noneDoc.add(newTextField(TEXTFIELD, "test doc with no lang", Field.Store.NO));
++
++        MatchingQueries<QueryMatch> none = monitor.match(noneDoc, QueryMatch.SIMPLE_MATCHER);
++        assertEquals(5, none.getMatchCount());
++        assertEquals(7, none.getQueriesRun());
++        assertNotNull(none.matches("1"));
++        assertNotNull(none.matches("2"));
++        assertNotNull(none.matches("3"));
++        assertNotNull(none.matches("4"));
++        assertNotNull(none.matches("5"));
++      }
++    }
++  }
++
+   public void testFilteringOnMatchAllQueries() throws IOException {
+     try (Monitor monitor = newMonitor()) {
+       monitor.register(
+diff --git a/lucene/monitor/src/test/org/apache/lucene/monitor/TestHuperDuperTermFieldFilteredPresearcher.java b/lucene/monitor/src/test/org/apache/lucene/monitor/TestHuperDuperTermFieldFilteredPresearcher.java
+new file mode 100644
+index 00000000000..da3c3f72cab
+--- /dev/null
++++ b/lucene/monitor/src/test/org/apache/lucene/monitor/TestHuperDuperTermFieldFilteredPresearcher.java
+@@ -0,0 +1,100 @@
++/*
++ * Licensed to the Apache Software Foundation (ASF) under one or more
++ * contributor license agreements.  See the NOTICE file distributed with
++ * this work for additional information regarding copyright ownership.
++ * The ASF licenses this file to You under the Apache License, Version 2.0
++ * (the "License"); you may not use this file except in compliance with
++ * the License.  You may obtain a copy of the License at
++ *
++ *     http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++package org.apache.lucene.monitor;
++
++import java.io.IOException;
++import java.util.Collections;
++import org.apache.lucene.document.Document;
++import org.apache.lucene.document.Field;
++
++public class TestHuperDuperTermFieldFilteredPresearcher
++    extends FieldFilterPresearcherComponentTestBase {
++
++  @Override
++  protected Presearcher createPresearcher() {
++    return new HuperDuperTermFilteredPresearcher(
++        TermWeightor.DEFAULT, Collections.emptyList(), Collections.singleton("language"));
++  }
++
++  @Override
++  public void testMissingFieldFiltering() throws IOException {
++
++    try (Monitor monitor = newMonitor()) {
++      monitor.register(
++          new MonitorQuery("1", parse("test"), null, Collections.singletonMap("language", "en")),
++          new MonitorQuery("2", parse("test"), null, Collections.singletonMap("language", "de")),
++          new MonitorQuery("3", parse("test"), null, Collections.singletonMap("language", "xxxxx")),
++          new MonitorQuery("4", parse("test"), null, Collections.emptyMap()),
++          new MonitorQuery("5", parse("*:*"), null, Collections.singletonMap("language", "en")),
++          new MonitorQuery("6", parse("zzzz"), null, Collections.emptyMap()),
++          // NOTE: these are guaranteed to produce a 'test' term making it a candidate query for our
++          // test docs,
++          // but are also guaranteed to never (forward) match any document
++          new MonitorQuery("7", parse("+test -test"), null, Collections.emptyMap()),
++          new MonitorQuery(
++              "8", parse("+test -test"), null, Collections.singletonMap("language", "en")));
++
++      {
++        Document enDoc = new Document();
++        enDoc.add(newTextField(TEXTFIELD, "this is a test", Field.Store.NO));
++        enDoc.add(newTextField("language", "en", Field.Store.NO));
++
++        MatchingQueries<QueryMatch> en = monitor.match(enDoc, QueryMatch.SIMPLE_MATCHER);
++        assertEquals(3, en.getMatchCount());
++        assertEquals(5, en.getQueriesRun());
++        assertNotNull(en.matches("1"));
++        assertNotNull(en.matches("4"));
++        assertNotNull(en.matches("5"));
++      }
++      {
++        Document deDoc = new Document();
++        deDoc.add(newTextField(TEXTFIELD, "das ist ein test", Field.Store.NO));
++        deDoc.add(newTextField("language", "de", Field.Store.NO));
++
++        MatchingQueries<QueryMatch> de = monitor.match(deDoc, QueryMatch.SIMPLE_MATCHER);
++        assertEquals(2, de.getMatchCount());
++        assertEquals(3, de.getQueriesRun());
++        assertNotNull(de.matches("2"));
++        assertNotNull(de.matches("4"));
++      }
++      {
++        Document bothDoc = new Document();
++        bothDoc.add(newTextField(TEXTFIELD, "this is ein test", Field.Store.NO));
++        bothDoc.add(newTextField("language", "en", Field.Store.NO));
++        bothDoc.add(newTextField("language", "de", Field.Store.NO));
++
++        MatchingQueries<QueryMatch> both = monitor.match(bothDoc, QueryMatch.SIMPLE_MATCHER);
++        assertEquals(4, both.getMatchCount());
++        assertEquals(6, both.getQueriesRun());
++        assertNotNull(both.matches("1"));
++        assertNotNull(both.matches("2"));
++        assertNotNull(both.matches("4"));
++        assertNotNull(both.matches("5"));
++      }
++      {
++        Document noneDoc = new Document();
++        noneDoc.add(newTextField(TEXTFIELD, "test doc with no lang", Field.Store.NO));
++
++        MatchingQueries<QueryMatch> none = monitor.match(noneDoc, QueryMatch.SIMPLE_MATCHER);
++        assertEquals(1, none.getMatchCount());
++        assertEquals(2, none.getQueriesRun());
++        assertNotNull(none.matches("4"));
++      }
++    }
++  }
++}
diff --git a/attachments/LUCENE-10572/Screen Shot 2022-05-16 at 10.28.22 AM.png b/attachments/LUCENE-10572/Screen Shot 2022-05-16 at 10.28.22 AM.png
new file mode 100644
index 00000000..469d1767
Binary files /dev/null and b/attachments/LUCENE-10572/Screen Shot 2022-05-16 at 10.28.22 AM.png differ
diff --git a/attachments/LUCENE-10579/backwards.log.gz b/attachments/LUCENE-10579/backwards.log.gz
new file mode 100644
index 00000000..4f86e5ff
Binary files /dev/null and b/attachments/LUCENE-10579/backwards.log.gz differ