Posted to commits@druid.apache.org by cw...@apache.org on 2023/05/17 11:31:58 UTC

[druid] branch 26.0.0 updated: more resilient segment metadata, don't parallel merge internal segment metadata queries (#14296) (#14304)

This is an automated email from the ASF dual-hosted git repository.

cwylie pushed a commit to branch 26.0.0
in repository https://gitbox.apache.org/repos/asf/druid.git


The following commit(s) were added to refs/heads/26.0.0 by this push:
     new 387670af51 more resilient segment metadata, don't parallel merge internal segment metadata queries (#14296) (#14304)
387670af51 is described below

commit 387670af516ac85f0f4495e36b70a7c8d1d94fd3
Author: Clint Wylie <cw...@apache.org>
AuthorDate: Wed May 17 04:31:51 2023 -0700

    more resilient segment metadata, don't parallel merge internal segment metadata queries (#14296) (#14304)
---
 .../druid/query/metadata/SegmentAnalyzer.java      | 205 ++++++++++-----------
 .../query/metadata/metadata/ColumnAnalysis.java    | 122 ++++++++++--
 .../druid/query/metadata/SegmentAnalyzerTest.java  |  73 +++++++-
 .../metadata/metadata/ColumnAnalysisTest.java      |   3 +-
 .../sql/calcite/schema/SegmentMetadataCache.java   |   8 +-
 .../calcite/schema/SegmentMetadataCacheTest.java   |   6 +-
 6 files changed, 288 insertions(+), 129 deletions(-)

diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
index acee5b241b..c1cc07ccdf 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
@@ -112,38 +112,52 @@ public class SegmentAnalyzer
         capabilities = storageAdapter.getColumnCapabilities(columnName);
       }
 
-      final ColumnAnalysis analysis;
-
-      switch (capabilities.getType()) {
-        case LONG:
-          final int bytesPerRow =
-              ColumnHolder.TIME_COLUMN_NAME.equals(columnName) ? NUM_BYTES_IN_TIMESTAMP : Long.BYTES;
-
-          analysis = analyzeNumericColumn(capabilities, numRows, bytesPerRow);
-          break;
-        case FLOAT:
-          analysis = analyzeNumericColumn(capabilities, numRows, NUM_BYTES_IN_TEXT_FLOAT);
-          break;
-        case DOUBLE:
-          analysis = analyzeNumericColumn(capabilities, numRows, Double.BYTES);
-          break;
-        case STRING:
-          if (index != null) {
-            analysis = analyzeStringColumn(capabilities, index.getColumnHolder(columnName));
-          } else {
-            analysis = analyzeStringColumn(capabilities, storageAdapter, columnName);
-          }
-          break;
-        case ARRAY:
-          analysis = analyzeArrayColumn(capabilities);
-          break;
-        case COMPLEX:
-          final ColumnHolder columnHolder = index != null ? index.getColumnHolder(columnName) : null;
-          analysis = analyzeComplexColumn(capabilities, numRows, columnHolder);
-          break;
-        default:
-          log.warn("Unknown column type[%s].", capabilities.asTypeString());
-          analysis = ColumnAnalysis.error(StringUtils.format("unknown_type_%s", capabilities.asTypeString()));
+      if (capabilities == null) {
+        log.warn("Unknown column type for column[%s]", columnName);
+        columns.put(columnName, ColumnAnalysis.error("unknown_type"));
+        continue;
+      }
+
+      ColumnAnalysis analysis;
+      try {
+        switch (capabilities.getType()) {
+          case LONG:
+            final int bytesPerRow =
+                ColumnHolder.TIME_COLUMN_NAME.equals(columnName) ? NUM_BYTES_IN_TIMESTAMP : Long.BYTES;
+
+            analysis = analyzeNumericColumn(capabilities, numRows, bytesPerRow);
+            break;
+          case FLOAT:
+            analysis = analyzeNumericColumn(capabilities, numRows, NUM_BYTES_IN_TEXT_FLOAT);
+            break;
+          case DOUBLE:
+            analysis = analyzeNumericColumn(capabilities, numRows, Double.BYTES);
+            break;
+          case STRING:
+            if (index != null) {
+              analysis = analyzeStringColumn(capabilities, index.getColumnHolder(columnName));
+            } else {
+              analysis = analyzeStringColumn(capabilities, storageAdapter, columnName);
+            }
+            break;
+          case ARRAY:
+            analysis = analyzeArrayColumn(capabilities);
+            break;
+          case COMPLEX:
+            final ColumnHolder columnHolder = index != null ? index.getColumnHolder(columnName) : null;
+            analysis = analyzeComplexColumn(capabilities, numRows, columnHolder);
+            break;
+          default:
+            log.warn("Unknown column type[%s] for column[%s].", capabilities.asTypeString(), columnName);
+            analysis = ColumnAnalysis.error(StringUtils.format("unknown_type_%s", capabilities.asTypeString()));
+        }
+      }
+      catch (RuntimeException re) {
+        // Swallow the exception and record an error analysis instead. This is preferable to exploding, since an
+        // exception here leaves the downstream broker SQL metadata cache unable to completely finish building;
+        // the SQL schema relies on this analysis, so the cache would continuously retry after every failure.
+        log.warn(re, "Error analyzing column[%s] of type[%s]", columnName, capabilities.asTypeString());
+        analysis = ColumnAnalysis.error(re.getMessage());
       }
 
       columns.put(columnName, analysis);
@@ -174,26 +188,16 @@ public class SegmentAnalyzer
   )
   {
     long size = 0;
+    final ColumnAnalysis.Builder bob = ColumnAnalysis.builder().withCapabilities(capabilities);
 
     if (analyzingSize()) {
       if (capabilities.hasMultipleValues().isTrue()) {
-        return ColumnAnalysis.error("multi_value");
+        return bob.withErrorMessage("multi_value").build();
       }
 
       size = ((long) length) * sizePerRow;
     }
-
-    return new ColumnAnalysis(
-        capabilities.toColumnType(),
-        capabilities.getType().name(),
-        capabilities.hasMultipleValues().isTrue(),
-        capabilities.hasNulls().isMaybeTrue(), // if we don't know for sure, then we should plan to check for nulls
-        size,
-        null,
-        null,
-        null,
-        null
-    );
+    return bob.withSize(size).build();
   }
 
   private ColumnAnalysis analyzeStringColumn(
@@ -237,23 +241,20 @@ public class SegmentAnalyzer
         }
       }
       catch (IOException e) {
-        throw new RuntimeException(e);
+        return ColumnAnalysis.builder().withCapabilities(capabilities).withErrorMessage(e.getMessage()).build();
       }
     } else {
       cardinality = 0;
     }
 
-    return new ColumnAnalysis(
-        capabilities.toColumnType(),
-        capabilities.getType().name(),
-        capabilities.hasMultipleValues().isTrue(),
-        capabilities.hasNulls().isMaybeTrue(), // if we don't know for sure, then we should plan to check for nulls
-        size,
-        analyzingCardinality() ? cardinality : 0,
-        min,
-        max,
-        null
-    );
+    return ColumnAnalysis.builder()
+                         .withCapabilities(capabilities)
+                         .withSize(size)
+                         .withCardinality(analyzingCardinality() ? cardinality : 0)
+                         .withMinValue(min)
+                         .withMaxValue(max)
+                         .build();
+
   }
 
   private ColumnAnalysis analyzeStringColumn(
@@ -322,21 +323,17 @@ public class SegmentAnalyzer
       max = storageAdapter.getMaxValue(columnName);
     }
 
-    return new ColumnAnalysis(
-        capabilities.toColumnType(),
-        capabilities.getType().name(),
-        capabilities.hasMultipleValues().isTrue(),
-        capabilities.hasNulls().isMaybeTrue(), // if we don't know for sure, then we should plan to check for nulls
-        size,
-        cardinality,
-        min,
-        max,
-        null
-    );
+    return ColumnAnalysis.builder()
+                         .withCapabilities(capabilities)
+                         .withSize(size)
+                         .withCardinality(cardinality)
+                         .withMinValue(min)
+                         .withMaxValue(max)
+                         .build();
   }
 
   private ColumnAnalysis analyzeComplexColumn(
-      @Nullable final ColumnCapabilities capabilities,
+      final ColumnCapabilities capabilities,
       final int numCells,
       @Nullable final ColumnHolder columnHolder
   )
@@ -344,67 +341,51 @@ public class SegmentAnalyzer
     final TypeSignature<ValueType> typeSignature = capabilities == null ? ColumnType.UNKNOWN_COMPLEX : capabilities;
     final String typeName = typeSignature.getComplexTypeName();
 
+    final ColumnAnalysis.Builder bob = ColumnAnalysis.builder()
+                                                     .withType(ColumnTypeFactory.ofType(typeSignature))
+                                                     .withTypeName(typeName);
+
     try (final BaseColumn theColumn = columnHolder != null ? columnHolder.getColumn() : null) {
+      if (theColumn != null && !(theColumn instanceof ComplexColumn)) {
+        return bob.withErrorMessage(
+                    StringUtils.format(
+                        "[%s] is not a [%s]",
+                        theColumn.getClass().getName(),
+                        ComplexColumn.class.getName()
+                    )
+                  )
+                  .build();
+      }
       final ComplexColumn complexColumn = (ComplexColumn) theColumn;
-      final boolean hasMultipleValues = capabilities != null && capabilities.hasMultipleValues().isTrue();
-      final boolean hasNulls = capabilities != null && capabilities.hasNulls().isMaybeTrue();
-      long size = 0;
 
+      bob.hasMultipleValues(capabilities.hasMultipleValues().isTrue())
+         .hasNulls(capabilities.hasNulls().isMaybeTrue());
+
+      long size = 0;
       if (analyzingSize() && complexColumn != null) {
+
         final ComplexMetricSerde serde = typeName == null ? null : ComplexMetrics.getSerdeForType(typeName);
         if (serde == null) {
-          return ColumnAnalysis.error(StringUtils.format("unknown_complex_%s", typeName));
+          return bob.withErrorMessage(StringUtils.format("unknown_complex_%s", typeName)).build();
         }
 
         final Function<Object, Long> inputSizeFn = serde.inputSizeFn();
-        if (inputSizeFn == null) {
-          return new ColumnAnalysis(
-              ColumnTypeFactory.ofType(typeSignature),
-              typeName,
-              hasMultipleValues,
-              hasNulls,
-              0,
-              null,
-              null,
-              null,
-              null
-          );
-        }
+        if (inputSizeFn != null) {
 
-        for (int i = 0; i < numCells; ++i) {
-          size += inputSizeFn.apply(complexColumn.getRowValue(i));
+          for (int i = 0; i < numCells; ++i) {
+            size += inputSizeFn.apply(complexColumn.getRowValue(i));
+          }
         }
       }
-
-      return new ColumnAnalysis(
-          ColumnTypeFactory.ofType(typeSignature),
-          typeName,
-          hasMultipleValues,
-          hasNulls,
-          size,
-          null,
-          null,
-          null,
-          null
-      );
+      return bob.withSize(size).build();
     }
     catch (IOException e) {
-      throw new RuntimeException(e);
+      return bob.withErrorMessage(e.getMessage()).build();
     }
   }
 
   private ColumnAnalysis analyzeArrayColumn(final ColumnCapabilities capabilities)
   {
-    return new ColumnAnalysis(
-        capabilities.toColumnType(),
-        capabilities.getType().name(),
-        false,
-        capabilities.hasNulls().isTrue(),
-        0L,
-        null,
-        null,
-        null,
-        null
-    );
+    return ColumnAnalysis.builder().withCapabilities(capabilities).build();
   }
 }
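
The heart of the resiliency change is the per-column try/catch above: a RuntimeException thrown while analyzing any single column is converted into an error ColumnAnalysis instead of failing the whole segment metadata query. A minimal standalone sketch of the pattern, with Analysis and analyzeColumn as hypothetical stand-ins for Druid's ColumnAnalysis and the switch statement in the patch:

    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class PerColumnErrorContainment
    {
      // Hypothetical stand-in for ColumnAnalysis: either a result or an error marker.
      record Analysis(String type, String errorMessage)
      {
        static Analysis ok(String type) { return new Analysis(type, null); }
        static Analysis error(String message) { return new Analysis(null, "error:" + message); }
      }

      // A RuntimeException from one column becomes an error entry for that column;
      // every other column is still analyzed, mirroring the catch block in the patch.
      static Map<String, Analysis> analyzeAll(List<String> columnNames)
      {
        final Map<String, Analysis> columns = new LinkedHashMap<>();
        for (String columnName : columnNames) {
          Analysis analysis;
          try {
            analysis = analyzeColumn(columnName);
          }
          catch (RuntimeException re) {
            analysis = Analysis.error(re.getMessage());
          }
          columns.put(columnName, analysis);
        }
        return columns;
      }

      // Hypothetical analyzer that fails on one column.
      static Analysis analyzeColumn(String columnName)
      {
        if ("bad".equals(columnName)) {
          throw new IllegalStateException("cannot analyze");
        }
        return Analysis.ok("LONG");
      }

      public static void main(String[] args)
      {
        // "bad" produces an error entry; "__time" and "x" analyze normally.
        System.out.println(analyzeAll(List.of("__time", "bad", "x")));
      }
    }

The same containment is applied inside the analyzers themselves: the IOException and improper-cast paths in analyzeStringColumn and analyzeComplexColumn now return builder-built error analyses instead of rethrowing as RuntimeException.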
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/metadata/ColumnAnalysis.java b/processing/src/main/java/org/apache/druid/query/metadata/metadata/ColumnAnalysis.java
index 446a13c08e..d157ee8c1a 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/metadata/ColumnAnalysis.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/metadata/ColumnAnalysis.java
@@ -24,8 +24,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
 import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.segment.column.ColumnCapabilities;
 import org.apache.druid.segment.column.ColumnType;
 
+import javax.annotation.Nullable;
 import java.util.Objects;
 
 /**
@@ -34,9 +36,17 @@ public class ColumnAnalysis
 {
   private static final String ERROR_PREFIX = "error:";
 
+  public static <T> Builder<T> builder()
+  {
+    return new Builder<T>();
+  }
+
   public static ColumnAnalysis error(String reason)
   {
-    return new ColumnAnalysis(ColumnType.STRING, "STRING", false, false, -1, null, null, null, ERROR_PREFIX + reason);
+    return builder().withType(ColumnType.STRING)
+                    .withSize(-1)
+                    .withErrorMessage(reason)
+                    .build();
   }
 
   private final String type;
@@ -182,17 +192,15 @@ public class ColumnAnalysis
       newMin = NullHandling.nullToEmptyIfNeeded((String) newMin);
       newMax = NullHandling.nullToEmptyIfNeeded((String) newMax);
     }
-    return new ColumnAnalysis(
-        typeSignature,
-        type,
-        multipleValues,
-        hasNulls || rhs.hasNulls,
-        size + rhs.getSize(),
-        cardinality,
-        newMin,
-        newMax,
-        null
-    );
+    return builder().withType(typeSignature)
+                    .withTypeName(type)
+                    .hasMultipleValues(multipleValues)
+                    .hasNulls(hasNulls || rhs.hasNulls)
+                    .withSize(size + rhs.getSize())
+                    .withCardinality(cardinality)
+                    .withMinValue(newMin)
+                    .withMaxValue(newMax)
+                    .build();
   }
 
   private <T extends Comparable> T choose(T obj1, T obj2, boolean max)
@@ -259,4 +267,94 @@ public class ColumnAnalysis
         errorMessage
     );
   }
+
+  public static class Builder<T>
+  {
+    private ColumnType typeSignature;
+    private String typeName;
+    private boolean hasMultipleValues;
+    private boolean hasNulls;
+    private long size;
+    private Integer cardinality;
+    private Comparable<T> minValue;
+    private Comparable<T> maxValue;
+    private String errorMessage;
+
+    public Builder withCapabilities(ColumnCapabilities capabilities)
+    {
+
+      return withType(capabilities.toColumnType()).hasMultipleValues(capabilities.hasMultipleValues().isTrue())
+                                                  // if we don't know for sure, then we should plan to check for nulls
+                                                  .hasNulls(capabilities.hasNulls().isMaybeTrue());
+    }
+
+    public Builder withType(ColumnType columnType)
+    {
+      this.typeSignature = columnType;
+      return this;
+    }
+
+    public Builder withTypeName(String typeName)
+    {
+      this.typeName = typeName;
+      return this;
+    }
+
+    public Builder hasMultipleValues(boolean hasMultipleValues)
+    {
+      this.hasMultipleValues = hasMultipleValues;
+      return this;
+    }
+
+    public Builder hasNulls(boolean hasNulls)
+    {
+      this.hasNulls = hasNulls;
+      return this;
+    }
+
+    public Builder withSize(long size)
+    {
+      this.size = size;
+      return this;
+    }
+
+    public Builder withCardinality(@Nullable Integer cardinality)
+    {
+      this.cardinality = cardinality;
+      return this;
+    }
+
+    public Builder withMinValue(Comparable<T> minValue)
+    {
+      this.minValue = minValue;
+      return this;
+    }
+
+    public Builder withMaxValue(Comparable<T> maxValue)
+    {
+      this.maxValue = maxValue;
+      return this;
+    }
+
+    public Builder withErrorMessage(String errorMessage)
+    {
+      this.errorMessage = ERROR_PREFIX + errorMessage;
+      return this;
+    }
+
+    public ColumnAnalysis build()
+    {
+      return new ColumnAnalysis(
+          typeSignature,
+          typeName == null ? typeSignature.getType().name() : typeName,
+          hasMultipleValues,
+          hasNulls,
+          size,
+          cardinality,
+          minValue,
+          maxValue,
+          errorMessage
+      );
+    }
+  }
 }
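
The new ColumnAnalysis.Builder removes the repeated nine-argument constructor calls visible in the removed lines above. A hedged sketch of a call site, assuming the druid-processing module is on the classpath; numericAnalysis is a hypothetical helper, not a method from the patch:

    import org.apache.druid.query.metadata.metadata.ColumnAnalysis;
    import org.apache.druid.segment.column.ColumnCapabilities;

    public class BuilderUsage
    {
      // Names only the fields this call site cares about: withCapabilities copies the
      // column type, multi-value flag, and (maybe-)nullability in one call, and build()
      // falls back to the type signature's name when no explicit type name is set.
      static ColumnAnalysis numericAnalysis(ColumnCapabilities capabilities, long size)
      {
        return ColumnAnalysis.builder()
                             .withCapabilities(capabilities)
                             .withSize(size)
                             .build();
      }
    }

Because withErrorMessage prepends the "error:" prefix itself, error and success analyses can be produced from the same builder, which is what lets the analyzers above bail out with bob.withErrorMessage(...).build() at any point.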
diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java
index adedb9c9e8..dc0dc72d25 100644
--- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java
+++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java
@@ -53,9 +53,12 @@ import org.apache.druid.segment.Segment;
 import org.apache.druid.segment.TestHelper;
 import org.apache.druid.segment.TestIndex;
 import org.apache.druid.segment.column.ColumnBuilder;
+import org.apache.druid.segment.column.ColumnCapabilitiesImpl;
 import org.apache.druid.segment.column.ColumnHolder;
 import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.StringDictionaryEncodedColumn;
 import org.apache.druid.segment.column.ValueType;
+import org.apache.druid.segment.data.ListIndexed;
 import org.apache.druid.segment.data.ObjectStrategy;
 import org.apache.druid.segment.incremental.IncrementalIndex;
 import org.apache.druid.segment.incremental.IncrementalIndexSchema;
@@ -65,6 +68,7 @@ import org.apache.druid.segment.serde.ComplexMetricSerde;
 import org.apache.druid.segment.serde.ComplexMetrics;
 import org.apache.druid.testing.InitializedNullHandlingTest;
 import org.apache.druid.timeline.SegmentId;
+import org.easymock.EasyMock;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -356,7 +360,7 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest
   }
 
   @Test
-  public void testComplexAnalysisNullColumn() throws IOException
+  public void testAnalysisNullAutoDiscoveredColumn() throws IOException
   {
     IndexBuilder bob = IndexBuilder.create();
     bob.tmpDir(temporaryFolder.newFolder());
@@ -383,6 +387,72 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest
     Assert.assertFalse(analysis.get("x").isError());
   }
 
+  @Test
+  public void testAnalysisAutoNullColumn() throws IOException
+  {
+    IndexBuilder bob = IndexBuilder.create();
+    bob.tmpDir(temporaryFolder.newFolder());
+    bob.writeNullColumns(true);
+    InputRowSchema schema = new InputRowSchema(
+        new TimestampSpec("time", null, null),
+        DimensionsSpec.builder().useSchemaDiscovery(true).build(),
+        null
+    );
+    bob.schema(IncrementalIndexSchema.builder()
+                                     .withTimestampSpec(schema.getTimestampSpec())
+                                     .withDimensionsSpec(schema.getDimensionsSpec())
+                                     .build());
+    bob.rows(ImmutableList.of(
+        MapInputRowParser.parse(schema, TestHelper.makeMapWithExplicitNull("time", 1234L, "x", null)))
+    );
+
+    QueryableIndex queryableIndex = bob.buildMMappedIndex();
+    Segment s = new QueryableIndexSegment(queryableIndex, SegmentId.dummy("test"));
+
+    SegmentAnalyzer analyzer = new SegmentAnalyzer(EMPTY_ANALYSES);
+    Map<String, ColumnAnalysis> analysis = analyzer.analyze(s);
+    Assert.assertEquals(ColumnType.STRING, analysis.get("x").getTypeSignature());
+    Assert.assertFalse(analysis.get("x").isError());
+  }
+
+  @Test
+  public void testAnalysisImproperComplex() throws IOException
+  {
+    QueryableIndex mockIndex = EasyMock.createMock(QueryableIndex.class);
+    EasyMock.expect(mockIndex.getNumRows()).andReturn(100).atLeastOnce();
+    EasyMock.expect(mockIndex.getColumnNames()).andReturn(Collections.singletonList("x")).atLeastOnce();
+    EasyMock.expect(mockIndex.getAvailableDimensions())
+            .andReturn(new ListIndexed<>(Collections.singletonList("x")))
+            .atLeastOnce();
+    EasyMock.expect(mockIndex.getColumnCapabilities(ColumnHolder.TIME_COLUMN_NAME))
+            .andReturn(ColumnCapabilitiesImpl.createDefault().setType(ColumnType.LONG))
+            .atLeastOnce();
+    EasyMock.expect(mockIndex.getColumnCapabilities("x"))
+            .andReturn(ColumnCapabilitiesImpl.createDefault().setType(ColumnType.UNKNOWN_COMPLEX))
+            .atLeastOnce();
+
+    ColumnHolder holder = EasyMock.createMock(ColumnHolder.class);
+    EasyMock.expect(mockIndex.getColumnHolder("x")).andReturn(holder).atLeastOnce();
+
+    StringDictionaryEncodedColumn dictionaryEncodedColumn = EasyMock.createMock(StringDictionaryEncodedColumn.class);
+    EasyMock.expect(holder.getColumn()).andReturn(dictionaryEncodedColumn).atLeastOnce();
+
+    dictionaryEncodedColumn.close();
+    EasyMock.expectLastCall();
+    EasyMock.replay(mockIndex, holder, dictionaryEncodedColumn);
+
+    Segment s = new QueryableIndexSegment(mockIndex, SegmentId.dummy("test"));
+
+    SegmentAnalyzer analyzer = new SegmentAnalyzer(EMPTY_ANALYSES);
+    Map<String, ColumnAnalysis> analysis = analyzer.analyze(s);
+    Assert.assertEquals(ColumnType.UNKNOWN_COMPLEX, analysis.get("x").getTypeSignature());
+    Assert.assertTrue(analysis.get("x").isError());
+    Assert.assertTrue(analysis.get("x").getErrorMessage().contains("is not a [org.apache.druid.segment.column.ComplexColumn]"));
+
+    EasyMock.verify(mockIndex, holder, dictionaryEncodedColumn);
+  }
+
+
   private static final class DummyObjectStrategy implements ObjectStrategy
   {
 
@@ -529,5 +599,4 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest
       return getIntermediateType();
     }
   }
-
 }
diff --git a/processing/src/test/java/org/apache/druid/query/metadata/metadata/ColumnAnalysisTest.java b/processing/src/test/java/org/apache/druid/query/metadata/metadata/ColumnAnalysisTest.java
index 9b1f75b5d7..a62b509228 100644
--- a/processing/src/test/java/org/apache/druid/query/metadata/metadata/ColumnAnalysisTest.java
+++ b/processing/src/test/java/org/apache/druid/query/metadata/metadata/ColumnAnalysisTest.java
@@ -22,10 +22,11 @@ package org.apache.druid.query.metadata.metadata;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.druid.segment.TestHelper;
 import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.testing.InitializedNullHandlingTest;
 import org.junit.Assert;
 import org.junit.Test;
 
-public class ColumnAnalysisTest
+public class ColumnAnalysisTest extends InitializedNullHandlingTest
 {
   private final ObjectMapper MAPPER = TestHelper.makeJsonMapper();
 
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCache.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCache.java
index 6d7d2f5264..8da4e647a9 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCache.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCache.java
@@ -47,6 +47,7 @@ import org.apache.druid.java.util.emitter.EmittingLogger;
 import org.apache.druid.java.util.emitter.service.ServiceEmitter;
 import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
 import org.apache.druid.query.GlobalTableDataSource;
+import org.apache.druid.query.QueryContexts;
 import org.apache.druid.query.TableDataSource;
 import org.apache.druid.query.metadata.metadata.AllColumnIncluderator;
 import org.apache.druid.query.metadata.metadata.ColumnAnalysis;
@@ -920,7 +921,12 @@ public class SegmentMetadataCache
         querySegmentSpec,
         new AllColumnIncluderator(),
         false,
-        brokerInternalQueryConfig.getContext(),
+        // disable parallel merge: these internal queries don't need fast merging and shouldn't consume parallel merge resources
+        QueryContexts.override(
+            brokerInternalQueryConfig.getContext(),
+            QueryContexts.BROKER_PARALLEL_MERGE_KEY,
+            false
+        ),
         EnumSet.noneOf(SegmentMetadataQuery.AnalysisType.class),
         false,
         false
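
On the broker side, the cache's internal segment metadata queries now run with parallel merge disabled via a single context-key override. The same call in isolation, as a minimal sketch; internalMetadataContext is a hypothetical helper name:

    import java.util.Map;

    import org.apache.druid.query.QueryContexts;

    public class DisableParallelMerge
    {
      // Overrides a single key on top of the caller's context (per the usage in the
      // patch), forcing broker parallel merge off while leaving other keys, such as
      // priority, as they were.
      static Map<String, Object> internalMetadataContext(Map<String, Object> baseContext)
      {
        return QueryContexts.override(
            baseContext,
            QueryContexts.BROKER_PARALLEL_MERGE_KEY,
            false
        );
      }
    }

The test change below asserts that the query issued by the cache carries both the configured priority and this parallel-merge override in its context.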
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCacheTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCacheTest.java
index b87eedd474..213ad453a6 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCacheTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/schema/SegmentMetadataCacheTest.java
@@ -36,6 +36,7 @@ import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.Pair;
 import org.apache.druid.java.util.common.guava.Sequences;
 import org.apache.druid.query.GlobalTableDataSource;
+import org.apache.druid.query.QueryContexts;
 import org.apache.druid.query.TableDataSource;
 import org.apache.druid.query.aggregation.CountAggregatorFactory;
 import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
@@ -1130,7 +1131,10 @@ public class SegmentMetadataCacheTest extends SegmentMetadataCacheCommon
   @Test
   public void testRunSegmentMetadataQueryWithContext() throws Exception
   {
-    Map<String, Object> queryContext = ImmutableMap.of("priority", 5);
+    Map<String, Object> queryContext = ImmutableMap.of(
+        QueryContexts.PRIORITY_KEY, 5,
+        QueryContexts.BROKER_PARALLEL_MERGE_KEY, false
+    );
 
     String brokerInternalQueryConfigJson = "{\"context\": { \"priority\": 5} }";
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@druid.apache.org
For additional commands, e-mail: commits-help@druid.apache.org