You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@pinot.apache.org by ki...@apache.org on 2020/11/01 01:10:15 UTC
[incubator-pinot] 01/01: Adding index creator and reader
This is an automated email from the ASF dual-hosted git repository.
kishoreg pushed a commit to branch json-indexing
in repository https://gitbox.apache.org/repos/asf/incubator-pinot.git
commit ee6714c2ac4b6eea174d48e862fb7020494b6c1d
Author: kishoreg <g....@gmail.com>
AuthorDate: Sun Oct 18 22:05:51 2020 -0700
Adding index creator and reader
---
.../antlr4/org/apache/pinot/pql/parsers/PQL2.g4 | 4 +
.../pinot/common/request/AggregationInfo.java | 52 +-
.../apache/pinot/common/request/BrokerRequest.java | 353 ++++++-----
.../apache/pinot/common/request/DataSource.java | 11 +-
.../apache/pinot/common/request/Expression.java | 35 +-
.../pinot/common/request/ExpressionType.java | 4 +-
.../pinot/common/request/FilterOperator.java | 11 +-
.../apache/pinot/common/request/FilterQuery.java | 46 +-
.../pinot/common/request/FilterQueryMap.java | 11 +-
.../org/apache/pinot/common/request/Function.java | 21 +-
.../org/apache/pinot/common/request/GroupBy.java | 85 ++-
.../pinot/common/request/HavingFilterQuery.java | 46 +-
.../pinot/common/request/HavingFilterQueryMap.java | 11 +-
.../apache/pinot/common/request/Identifier.java | 16 +-
.../pinot/common/request/InstanceRequest.java | 76 ++-
.../org/apache/pinot/common/request/Literal.java | 4 +-
.../apache/pinot/common/request/PinotQuery.java | 61 +-
.../apache/pinot/common/request/QuerySource.java | 11 +-
.../org/apache/pinot/common/request/QueryType.java | 31 +-
.../org/apache/pinot/common/request/Selection.java | 94 ++-
.../apache/pinot/common/request/SelectionSort.java | 16 +-
.../pinot/common/response/ProcessingException.java | 283 ++++-----
.../apache/pinot/parsers/utils/ParserUtils.java | 1 +
.../parsers/PinotQuery2BrokerRequestConverter.java | 1 +
.../pinot/pql/parsers/pql2/ast/FilterKind.java | 3 +-
.../apache/pinot/sql/parsers/CalciteSqlParser.java | 8 +-
pinot-common/src/thrift/request.thrift | 3 +-
.../org/apache/pinot/core/common/DataSource.java | 7 +
.../indexsegment/mutable/MutableSegmentImpl.java | 12 +-
.../io/util/VarLengthBytesValueReaderWriter.java | 28 +-
.../operator/filter/BitmapBasedFilterOperator.java | 4 +-
.../core/operator/filter/FilterOperatorUtils.java | 2 +-
.../operator/filter/JSONMatchFilterOperator.java | 148 +++++
.../transform/function/CastTransformFunction.java | 2 +-
.../org/apache/pinot/core/plan/FilterPlanNode.java | 15 +
.../{Predicate.java => JSONMatchPredicate.java} | 57 +-
.../query/request/context/predicate/Predicate.java | 3 +-
.../context/utils/QueryContextConverterUtils.java | 4 +
.../core/segment/creator/impl/V1Constants.java | 1 +
.../segment/creator/impl/inv/JSONIndexCreator.java | 658 +++++++++++++++++++++
.../creator/impl/inv/NestedObjectIndexCreator.java | 158 -----
.../segment/index/column/ColumnIndexContainer.java | 7 +
.../index/column/PhysicalColumnIndexContainer.java | 15 +
.../segment/index/datasource/BaseDataSource.java | 13 +-
.../index/datasource/ImmutableDataSource.java | 5 +-
.../index/datasource/MutableDataSource.java | 5 +-
.../segment/index/loader/IndexLoadingConfig.java | 10 +
.../segment/index/loader/SegmentPreProcessor.java | 6 +
.../loader/invertedindex/JSONIndexHandler.java | 216 +++++++
.../segment/index/readers/JSONIndexReader.java | 160 +++++
.../pinot/core/segment/store/ColumnIndexType.java | 3 +-
.../core/segment/store/FilePerIndexDirectory.java | 5 +
.../virtualcolumn/VirtualColumnIndexContainer.java | 6 +
.../core/startree/v2/store/StarTreeDataSource.java | 3 +-
.../pinot/spi/config/table/IndexingConfig.java | 9 +
.../java/org/apache/pinot/spi/data/FieldSpec.java | 2 +-
.../pinot/tools/admin/command/JSONQuickstart.java | 224 +++++++
57 files changed, 2167 insertions(+), 919 deletions(-)
diff --git a/pinot-common/src/main/antlr4/org/apache/pinot/pql/parsers/PQL2.g4 b/pinot-common/src/main/antlr4/org/apache/pinot/pql/parsers/PQL2.g4
index 7a2c816..e96e575 100644
--- a/pinot-common/src/main/antlr4/org/apache/pinot/pql/parsers/PQL2.g4
+++ b/pinot-common/src/main/antlr4/org/apache/pinot/pql/parsers/PQL2.g4
@@ -78,6 +78,7 @@ predicate:
| isClause # IsPredicate
| regexpLikeClause # RegexpLikePredicate
| textMatchClause # TextMatchPredicate
+ | jsonMatchClause # JsonMatchPredicate
;
inClause:
@@ -99,6 +100,9 @@ regexpLikeClause:
textMatchClause:
TEXT_MATCH '(' expression ',' literal ')';
+jsonMatchClause:
+ JSON_MATCH '(' expression ',' literal ')';
+
booleanOperator: OR | AND;
groupByClause: GROUP BY groupByList;
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/AggregationInfo.java b/pinot-common/src/main/java/org/apache/pinot/common/request/AggregationInfo.java
index d3eb2f1..7aa8d93 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/AggregationInfo.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/AggregationInfo.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -28,9 +28,9 @@ package org.apache.pinot.common.request;
/**
* AUTO GENERATED: DO NOT EDIT
* Aggregation
- *
+ *
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2020-04-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo, AggregationInfo._Fields>, java.io.Serializable, Cloneable, Comparable<AggregationInfo> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AggregationInfo");
@@ -42,10 +42,10 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new AggregationInfoStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new AggregationInfoTupleSchemeFactory();
- public @org.apache.thrift.annotation.Nullable java.lang.String aggregationType; // optional
- public @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> aggregationParams; // optional
- public boolean isInSelectList; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> expressions; // optional
+ private @org.apache.thrift.annotation.Nullable java.lang.String aggregationType; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> aggregationParams; // optional
+ private boolean isInSelectList; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> expressions; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -123,16 +123,16 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.AGGREGATION_TYPE, new org.apache.thrift.meta_data.FieldMetaData("aggregationType", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.AGGREGATION_TYPE, new org.apache.thrift.meta_data.FieldMetaData("aggregationType", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
- tmpMap.put(_Fields.AGGREGATION_PARAMS, new org.apache.thrift.meta_data.FieldMetaData("aggregationParams", org.apache.thrift.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
- new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+ tmpMap.put(_Fields.AGGREGATION_PARAMS, new org.apache.thrift.meta_data.FieldMetaData("aggregationParams", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
- tmpMap.put(_Fields.IS_IN_SELECT_LIST, new org.apache.thrift.meta_data.FieldMetaData("isInSelectList", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.IS_IN_SELECT_LIST, new org.apache.thrift.meta_data.FieldMetaData("isInSelectList", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
- tmpMap.put(_Fields.EXPRESSIONS, new org.apache.thrift.meta_data.FieldMetaData("expressions", org.apache.thrift.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ tmpMap.put(_Fields.EXPRESSIONS, new org.apache.thrift.meta_data.FieldMetaData("expressions", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(AggregationInfo.class, metaDataMap);
@@ -178,9 +178,8 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
return this.aggregationType;
}
- public AggregationInfo setAggregationType(@org.apache.thrift.annotation.Nullable java.lang.String aggregationType) {
+ public void setAggregationType(@org.apache.thrift.annotation.Nullable java.lang.String aggregationType) {
this.aggregationType = aggregationType;
- return this;
}
public void unsetAggregationType() {
@@ -214,9 +213,8 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
return this.aggregationParams;
}
- public AggregationInfo setAggregationParams(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> aggregationParams) {
+ public void setAggregationParams(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> aggregationParams) {
this.aggregationParams = aggregationParams;
- return this;
}
public void unsetAggregationParams() {
@@ -238,10 +236,9 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
return this.isInSelectList;
}
- public AggregationInfo setIsInSelectList(boolean isInSelectList) {
+ public void setIsInSelectList(boolean isInSelectList) {
this.isInSelectList = isInSelectList;
setIsInSelectListIsSet(true);
- return this;
}
public void unsetIsInSelectList() {
@@ -278,9 +275,8 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
return this.expressions;
}
- public AggregationInfo setExpressions(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> expressions) {
+ public void setExpressions(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> expressions) {
this.expressions = expressions;
- return this;
}
public void unsetExpressions() {
@@ -595,7 +591,7 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
while (true)
{
schemeField = iprot.readFieldBegin();
- if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
@@ -603,7 +599,7 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.aggregationType = iprot.readString();
struct.setAggregationTypeIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -623,7 +619,7 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
iprot.readMapEnd();
}
struct.setAggregationParamsIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -631,7 +627,7 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
struct.isInSelectList = iprot.readBool();
struct.setIsInSelectListIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -649,7 +645,7 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
iprot.readListEnd();
}
struct.setExpressionsIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -659,8 +655,6 @@ public class AggregationInfo implements org.apache.thrift.TBase<AggregationInfo,
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/BrokerRequest.java b/pinot-common/src/main/java/org/apache/pinot/common/request/BrokerRequest.java
index f3ff93a..a902118 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/BrokerRequest.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/BrokerRequest.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -28,9 +28,9 @@ package org.apache.pinot.common.request;
/**
* AUTO GENERATED: DO NOT EDIT
* Broker Query
- *
+ *
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-08-16")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, BrokerRequest._Fields>, java.io.Serializable, Cloneable, Comparable<BrokerRequest> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BrokerRequest");
@@ -57,25 +57,25 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new BrokerRequestStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new BrokerRequestTupleSchemeFactory();
- public @org.apache.thrift.annotation.Nullable QueryType queryType; // optional
- public @org.apache.thrift.annotation.Nullable QuerySource querySource; // optional
- public @org.apache.thrift.annotation.Nullable java.lang.String timeInterval; // optional
- public @org.apache.thrift.annotation.Nullable java.lang.String duration; // optional
- public @org.apache.thrift.annotation.Nullable FilterQuery filterQuery; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<AggregationInfo> aggregationsInfo; // optional
- public @org.apache.thrift.annotation.Nullable GroupBy groupBy; // optional
- public @org.apache.thrift.annotation.Nullable Selection selections; // optional
- public @org.apache.thrift.annotation.Nullable FilterQueryMap filterSubQueryMap; // optional
- public @org.apache.thrift.annotation.Nullable java.lang.String bucketHashKey; // optional
- public boolean enableTrace; // optional
- public @org.apache.thrift.annotation.Nullable java.lang.String responseFormat; // optional
- public @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> debugOptions; // optional
- public @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> queryOptions; // optional
- public @org.apache.thrift.annotation.Nullable HavingFilterQuery havingFilterQuery; // optional
- public @org.apache.thrift.annotation.Nullable HavingFilterQueryMap havingFilterSubQueryMap; // optional
- public @org.apache.thrift.annotation.Nullable org.apache.pinot.common.request.PinotQuery pinotQuery; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<SelectionSort> orderBy; // optional
- public int limit; // optional
+ private @org.apache.thrift.annotation.Nullable QueryType queryType; // optional
+ private @org.apache.thrift.annotation.Nullable QuerySource querySource; // optional
+ private @org.apache.thrift.annotation.Nullable java.lang.String timeInterval; // optional
+ private @org.apache.thrift.annotation.Nullable java.lang.String duration; // optional
+ private @org.apache.thrift.annotation.Nullable FilterQuery filterQuery; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<AggregationInfo> aggregationsInfo; // optional
+ private @org.apache.thrift.annotation.Nullable GroupBy groupBy; // optional
+ private @org.apache.thrift.annotation.Nullable Selection selections; // optional
+ private @org.apache.thrift.annotation.Nullable FilterQueryMap filterSubQueryMap; // optional
+ private @org.apache.thrift.annotation.Nullable java.lang.String bucketHashKey; // optional
+ private boolean enableTrace; // optional
+ private @org.apache.thrift.annotation.Nullable java.lang.String responseFormat; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> debugOptions; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> queryOptions; // optional
+ private @org.apache.thrift.annotation.Nullable HavingFilterQuery havingFilterQuery; // optional
+ private @org.apache.thrift.annotation.Nullable HavingFilterQueryMap havingFilterSubQueryMap; // optional
+ private @org.apache.thrift.annotation.Nullable org.apache.pinot.common.request.PinotQuery pinotQuery; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<SelectionSort> orderBy; // optional
+ private int limit; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -199,49 +199,49 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.QUERY_TYPE, new org.apache.thrift.meta_data.FieldMetaData("queryType", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.QUERY_TYPE, new org.apache.thrift.meta_data.FieldMetaData("queryType", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, QueryType.class)));
- tmpMap.put(_Fields.QUERY_SOURCE, new org.apache.thrift.meta_data.FieldMetaData("querySource", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.QUERY_SOURCE, new org.apache.thrift.meta_data.FieldMetaData("querySource", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, QuerySource.class)));
- tmpMap.put(_Fields.TIME_INTERVAL, new org.apache.thrift.meta_data.FieldMetaData("timeInterval", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.TIME_INTERVAL, new org.apache.thrift.meta_data.FieldMetaData("timeInterval", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
- tmpMap.put(_Fields.DURATION, new org.apache.thrift.meta_data.FieldMetaData("duration", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.DURATION, new org.apache.thrift.meta_data.FieldMetaData("duration", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
- tmpMap.put(_Fields.FILTER_QUERY, new org.apache.thrift.meta_data.FieldMetaData("filterQuery", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.FILTER_QUERY, new org.apache.thrift.meta_data.FieldMetaData("filterQuery", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, FilterQuery.class)));
- tmpMap.put(_Fields.AGGREGATIONS_INFO, new org.apache.thrift.meta_data.FieldMetaData("aggregationsInfo", org.apache.thrift.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ tmpMap.put(_Fields.AGGREGATIONS_INFO, new org.apache.thrift.meta_data.FieldMetaData("aggregationsInfo", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, AggregationInfo.class))));
- tmpMap.put(_Fields.GROUP_BY, new org.apache.thrift.meta_data.FieldMetaData("groupBy", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.GROUP_BY, new org.apache.thrift.meta_data.FieldMetaData("groupBy", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, GroupBy.class)));
- tmpMap.put(_Fields.SELECTIONS, new org.apache.thrift.meta_data.FieldMetaData("selections", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.SELECTIONS, new org.apache.thrift.meta_data.FieldMetaData("selections", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Selection.class)));
- tmpMap.put(_Fields.FILTER_SUB_QUERY_MAP, new org.apache.thrift.meta_data.FieldMetaData("filterSubQueryMap", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.FILTER_SUB_QUERY_MAP, new org.apache.thrift.meta_data.FieldMetaData("filterSubQueryMap", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, FilterQueryMap.class)));
- tmpMap.put(_Fields.BUCKET_HASH_KEY, new org.apache.thrift.meta_data.FieldMetaData("bucketHashKey", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.BUCKET_HASH_KEY, new org.apache.thrift.meta_data.FieldMetaData("bucketHashKey", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
- tmpMap.put(_Fields.ENABLE_TRACE, new org.apache.thrift.meta_data.FieldMetaData("enableTrace", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.ENABLE_TRACE, new org.apache.thrift.meta_data.FieldMetaData("enableTrace", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
- tmpMap.put(_Fields.RESPONSE_FORMAT, new org.apache.thrift.meta_data.FieldMetaData("responseFormat", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.RESPONSE_FORMAT, new org.apache.thrift.meta_data.FieldMetaData("responseFormat", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
- tmpMap.put(_Fields.DEBUG_OPTIONS, new org.apache.thrift.meta_data.FieldMetaData("debugOptions", org.apache.thrift.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
- new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+ tmpMap.put(_Fields.DEBUG_OPTIONS, new org.apache.thrift.meta_data.FieldMetaData("debugOptions", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
- tmpMap.put(_Fields.QUERY_OPTIONS, new org.apache.thrift.meta_data.FieldMetaData("queryOptions", org.apache.thrift.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
- new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+ tmpMap.put(_Fields.QUERY_OPTIONS, new org.apache.thrift.meta_data.FieldMetaData("queryOptions", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
- tmpMap.put(_Fields.HAVING_FILTER_QUERY, new org.apache.thrift.meta_data.FieldMetaData("havingFilterQuery", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.HAVING_FILTER_QUERY, new org.apache.thrift.meta_data.FieldMetaData("havingFilterQuery", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, HavingFilterQuery.class)));
- tmpMap.put(_Fields.HAVING_FILTER_SUB_QUERY_MAP, new org.apache.thrift.meta_data.FieldMetaData("havingFilterSubQueryMap", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.HAVING_FILTER_SUB_QUERY_MAP, new org.apache.thrift.meta_data.FieldMetaData("havingFilterSubQueryMap", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, HavingFilterQueryMap.class)));
- tmpMap.put(_Fields.PINOT_QUERY, new org.apache.thrift.meta_data.FieldMetaData("pinotQuery", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.PINOT_QUERY, new org.apache.thrift.meta_data.FieldMetaData("pinotQuery", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, org.apache.pinot.common.request.PinotQuery.class)));
- tmpMap.put(_Fields.ORDER_BY, new org.apache.thrift.meta_data.FieldMetaData("orderBy", org.apache.thrift.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ tmpMap.put(_Fields.ORDER_BY, new org.apache.thrift.meta_data.FieldMetaData("orderBy", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, SelectionSort.class))));
- tmpMap.put(_Fields.LIMIT, new org.apache.thrift.meta_data.FieldMetaData("limit", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ tmpMap.put(_Fields.LIMIT, new org.apache.thrift.meta_data.FieldMetaData("limit", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(BrokerRequest.class, metaDataMap);
@@ -356,9 +356,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.queryType;
}
- public BrokerRequest setQueryType(@org.apache.thrift.annotation.Nullable QueryType queryType) {
+ public void setQueryType(@org.apache.thrift.annotation.Nullable QueryType queryType) {
this.queryType = queryType;
- return this;
}
public void unsetQueryType() {
@@ -381,9 +380,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.querySource;
}
- public BrokerRequest setQuerySource(@org.apache.thrift.annotation.Nullable QuerySource querySource) {
+ public void setQuerySource(@org.apache.thrift.annotation.Nullable QuerySource querySource) {
this.querySource = querySource;
- return this;
}
public void unsetQuerySource() {
@@ -406,9 +404,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.timeInterval;
}
- public BrokerRequest setTimeInterval(@org.apache.thrift.annotation.Nullable java.lang.String timeInterval) {
+ public void setTimeInterval(@org.apache.thrift.annotation.Nullable java.lang.String timeInterval) {
this.timeInterval = timeInterval;
- return this;
}
public void unsetTimeInterval() {
@@ -431,9 +428,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.duration;
}
- public BrokerRequest setDuration(@org.apache.thrift.annotation.Nullable java.lang.String duration) {
+ public void setDuration(@org.apache.thrift.annotation.Nullable java.lang.String duration) {
this.duration = duration;
- return this;
}
public void unsetDuration() {
@@ -456,9 +452,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.filterQuery;
}
- public BrokerRequest setFilterQuery(@org.apache.thrift.annotation.Nullable FilterQuery filterQuery) {
+ public void setFilterQuery(@org.apache.thrift.annotation.Nullable FilterQuery filterQuery) {
this.filterQuery = filterQuery;
- return this;
}
public void unsetFilterQuery() {
@@ -497,9 +492,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.aggregationsInfo;
}
- public BrokerRequest setAggregationsInfo(@org.apache.thrift.annotation.Nullable java.util.List<AggregationInfo> aggregationsInfo) {
+ public void setAggregationsInfo(@org.apache.thrift.annotation.Nullable java.util.List<AggregationInfo> aggregationsInfo) {
this.aggregationsInfo = aggregationsInfo;
- return this;
}
public void unsetAggregationsInfo() {
@@ -522,9 +516,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.groupBy;
}
- public BrokerRequest setGroupBy(@org.apache.thrift.annotation.Nullable GroupBy groupBy) {
+ public void setGroupBy(@org.apache.thrift.annotation.Nullable GroupBy groupBy) {
this.groupBy = groupBy;
- return this;
}
public void unsetGroupBy() {
@@ -547,9 +540,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.selections;
}
- public BrokerRequest setSelections(@org.apache.thrift.annotation.Nullable Selection selections) {
+ public void setSelections(@org.apache.thrift.annotation.Nullable Selection selections) {
this.selections = selections;
- return this;
}
public void unsetSelections() {
@@ -572,9 +564,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.filterSubQueryMap;
}
- public BrokerRequest setFilterSubQueryMap(@org.apache.thrift.annotation.Nullable FilterQueryMap filterSubQueryMap) {
+ public void setFilterSubQueryMap(@org.apache.thrift.annotation.Nullable FilterQueryMap filterSubQueryMap) {
this.filterSubQueryMap = filterSubQueryMap;
- return this;
}
public void unsetFilterSubQueryMap() {
@@ -597,9 +588,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.bucketHashKey;
}
- public BrokerRequest setBucketHashKey(@org.apache.thrift.annotation.Nullable java.lang.String bucketHashKey) {
+ public void setBucketHashKey(@org.apache.thrift.annotation.Nullable java.lang.String bucketHashKey) {
this.bucketHashKey = bucketHashKey;
- return this;
}
public void unsetBucketHashKey() {
@@ -621,10 +611,9 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.enableTrace;
}
- public BrokerRequest setEnableTrace(boolean enableTrace) {
+ public void setEnableTrace(boolean enableTrace) {
this.enableTrace = enableTrace;
setEnableTraceIsSet(true);
- return this;
}
public void unsetEnableTrace() {
@@ -645,9 +634,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.responseFormat;
}
- public BrokerRequest setResponseFormat(@org.apache.thrift.annotation.Nullable java.lang.String responseFormat) {
+ public void setResponseFormat(@org.apache.thrift.annotation.Nullable java.lang.String responseFormat) {
this.responseFormat = responseFormat;
- return this;
}
public void unsetResponseFormat() {
@@ -681,9 +669,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.debugOptions;
}
- public BrokerRequest setDebugOptions(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> debugOptions) {
+ public void setDebugOptions(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> debugOptions) {
this.debugOptions = debugOptions;
- return this;
}
public void unsetDebugOptions() {
@@ -717,9 +704,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.queryOptions;
}
- public BrokerRequest setQueryOptions(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> queryOptions) {
+ public void setQueryOptions(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> queryOptions) {
this.queryOptions = queryOptions;
- return this;
}
public void unsetQueryOptions() {
@@ -742,9 +728,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.havingFilterQuery;
}
- public BrokerRequest setHavingFilterQuery(@org.apache.thrift.annotation.Nullable HavingFilterQuery havingFilterQuery) {
+ public void setHavingFilterQuery(@org.apache.thrift.annotation.Nullable HavingFilterQuery havingFilterQuery) {
this.havingFilterQuery = havingFilterQuery;
- return this;
}
public void unsetHavingFilterQuery() {
@@ -767,9 +752,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.havingFilterSubQueryMap;
}
- public BrokerRequest setHavingFilterSubQueryMap(@org.apache.thrift.annotation.Nullable HavingFilterQueryMap havingFilterSubQueryMap) {
+ public void setHavingFilterSubQueryMap(@org.apache.thrift.annotation.Nullable HavingFilterQueryMap havingFilterSubQueryMap) {
this.havingFilterSubQueryMap = havingFilterSubQueryMap;
- return this;
}
public void unsetHavingFilterSubQueryMap() {
@@ -792,9 +776,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.pinotQuery;
}
- public BrokerRequest setPinotQuery(@org.apache.thrift.annotation.Nullable org.apache.pinot.common.request.PinotQuery pinotQuery) {
+ public void setPinotQuery(@org.apache.thrift.annotation.Nullable org.apache.pinot.common.request.PinotQuery pinotQuery) {
this.pinotQuery = pinotQuery;
- return this;
}
public void unsetPinotQuery() {
@@ -833,9 +816,8 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.orderBy;
}
- public BrokerRequest setOrderBy(@org.apache.thrift.annotation.Nullable java.util.List<SelectionSort> orderBy) {
+ public void setOrderBy(@org.apache.thrift.annotation.Nullable java.util.List<SelectionSort> orderBy) {
this.orderBy = orderBy;
- return this;
}
public void unsetOrderBy() {
@@ -857,10 +839,9 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
return this.limit;
}
- public BrokerRequest setLimit(int limit) {
+ public void setLimit(int limit) {
this.limit = limit;
setLimitIsSet(true);
- return this;
}
public void unsetLimit() {
@@ -1886,7 +1867,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
while (true)
{
schemeField = iprot.readFieldBegin();
- if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
@@ -1895,7 +1876,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
struct.queryType = new QueryType();
struct.queryType.read(iprot);
struct.setQueryTypeIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -1904,7 +1885,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
struct.querySource = new QuerySource();
struct.querySource.read(iprot);
struct.setQuerySourceIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -1912,7 +1893,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.timeInterval = iprot.readString();
struct.setTimeIntervalIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -1920,7 +1901,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.duration = iprot.readString();
struct.setDurationIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -1929,26 +1910,26 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
struct.filterQuery = new FilterQuery();
struct.filterQuery.read(iprot);
struct.setFilterQueryIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 6: // AGGREGATIONS_INFO
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
- org.apache.thrift.protocol.TList _list94 = iprot.readListBegin();
- struct.aggregationsInfo = new java.util.ArrayList<AggregationInfo>(_list94.size);
- @org.apache.thrift.annotation.Nullable AggregationInfo _elem95;
- for (int _i96 = 0; _i96 < _list94.size; ++_i96)
+ org.apache.thrift.protocol.TList _list102 = iprot.readListBegin();
+ struct.aggregationsInfo = new java.util.ArrayList<AggregationInfo>(_list102.size);
+ @org.apache.thrift.annotation.Nullable AggregationInfo _elem103;
+ for (int _i104 = 0; _i104 < _list102.size; ++_i104)
{
- _elem95 = new AggregationInfo();
- _elem95.read(iprot);
- struct.aggregationsInfo.add(_elem95);
+ _elem103 = new AggregationInfo();
+ _elem103.read(iprot);
+ struct.aggregationsInfo.add(_elem103);
}
iprot.readListEnd();
}
struct.setAggregationsInfoIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -1957,7 +1938,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
struct.groupBy = new GroupBy();
struct.groupBy.read(iprot);
struct.setGroupByIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -1966,7 +1947,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
struct.selections = new Selection();
struct.selections.read(iprot);
struct.setSelectionsIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -1975,7 +1956,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
struct.filterSubQueryMap = new FilterQueryMap();
struct.filterSubQueryMap.read(iprot);
struct.setFilterSubQueryMapIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -1983,7 +1964,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.bucketHashKey = iprot.readString();
struct.setBucketHashKeyIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -1991,7 +1972,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
struct.enableTrace = iprot.readBool();
struct.setEnableTraceIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -1999,47 +1980,47 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.responseFormat = iprot.readString();
struct.setResponseFormatIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 13: // DEBUG_OPTIONS
if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
{
- org.apache.thrift.protocol.TMap _map97 = iprot.readMapBegin();
- struct.debugOptions = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map97.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _key98;
- @org.apache.thrift.annotation.Nullable java.lang.String _val99;
- for (int _i100 = 0; _i100 < _map97.size; ++_i100)
+ org.apache.thrift.protocol.TMap _map105 = iprot.readMapBegin();
+ struct.debugOptions = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map105.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _key106;
+ @org.apache.thrift.annotation.Nullable java.lang.String _val107;
+ for (int _i108 = 0; _i108 < _map105.size; ++_i108)
{
- _key98 = iprot.readString();
- _val99 = iprot.readString();
- struct.debugOptions.put(_key98, _val99);
+ _key106 = iprot.readString();
+ _val107 = iprot.readString();
+ struct.debugOptions.put(_key106, _val107);
}
iprot.readMapEnd();
}
struct.setDebugOptionsIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 14: // QUERY_OPTIONS
if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
{
- org.apache.thrift.protocol.TMap _map101 = iprot.readMapBegin();
- struct.queryOptions = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map101.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _key102;
- @org.apache.thrift.annotation.Nullable java.lang.String _val103;
- for (int _i104 = 0; _i104 < _map101.size; ++_i104)
+ org.apache.thrift.protocol.TMap _map109 = iprot.readMapBegin();
+ struct.queryOptions = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map109.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _key110;
+ @org.apache.thrift.annotation.Nullable java.lang.String _val111;
+ for (int _i112 = 0; _i112 < _map109.size; ++_i112)
{
- _key102 = iprot.readString();
- _val103 = iprot.readString();
- struct.queryOptions.put(_key102, _val103);
+ _key110 = iprot.readString();
+ _val111 = iprot.readString();
+ struct.queryOptions.put(_key110, _val111);
}
iprot.readMapEnd();
}
struct.setQueryOptionsIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -2048,7 +2029,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
struct.havingFilterQuery = new HavingFilterQuery();
struct.havingFilterQuery.read(iprot);
struct.setHavingFilterQueryIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -2057,7 +2038,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
struct.havingFilterSubQueryMap = new HavingFilterQueryMap();
struct.havingFilterSubQueryMap.read(iprot);
struct.setHavingFilterSubQueryMapIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -2066,26 +2047,26 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
struct.pinotQuery = new org.apache.pinot.common.request.PinotQuery();
struct.pinotQuery.read(iprot);
struct.setPinotQueryIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 18: // ORDER_BY
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
- org.apache.thrift.protocol.TList _list105 = iprot.readListBegin();
- struct.orderBy = new java.util.ArrayList<SelectionSort>(_list105.size);
- @org.apache.thrift.annotation.Nullable SelectionSort _elem106;
- for (int _i107 = 0; _i107 < _list105.size; ++_i107)
+ org.apache.thrift.protocol.TList _list113 = iprot.readListBegin();
+ struct.orderBy = new java.util.ArrayList<SelectionSort>(_list113.size);
+ @org.apache.thrift.annotation.Nullable SelectionSort _elem114;
+ for (int _i115 = 0; _i115 < _list113.size; ++_i115)
{
- _elem106 = new SelectionSort();
- _elem106.read(iprot);
- struct.orderBy.add(_elem106);
+ _elem114 = new SelectionSort();
+ _elem114.read(iprot);
+ struct.orderBy.add(_elem114);
}
iprot.readListEnd();
}
struct.setOrderByIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -2093,7 +2074,7 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.limit = iprot.readI32();
struct.setLimitIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -2103,8 +2084,6 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
@@ -2152,9 +2131,9 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
oprot.writeFieldBegin(AGGREGATIONS_INFO_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.aggregationsInfo.size()));
- for (AggregationInfo _iter108 : struct.aggregationsInfo)
+ for (AggregationInfo _iter116 : struct.aggregationsInfo)
{
- _iter108.write(oprot);
+ _iter116.write(oprot);
}
oprot.writeListEnd();
}
@@ -2206,10 +2185,10 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
oprot.writeFieldBegin(DEBUG_OPTIONS_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.debugOptions.size()));
- for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter109 : struct.debugOptions.entrySet())
+ for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter117 : struct.debugOptions.entrySet())
{
- oprot.writeString(_iter109.getKey());
- oprot.writeString(_iter109.getValue());
+ oprot.writeString(_iter117.getKey());
+ oprot.writeString(_iter117.getValue());
}
oprot.writeMapEnd();
}
@@ -2221,10 +2200,10 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
oprot.writeFieldBegin(QUERY_OPTIONS_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.queryOptions.size()));
- for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter110 : struct.queryOptions.entrySet())
+ for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter118 : struct.queryOptions.entrySet())
{
- oprot.writeString(_iter110.getKey());
- oprot.writeString(_iter110.getValue());
+ oprot.writeString(_iter118.getKey());
+ oprot.writeString(_iter118.getValue());
}
oprot.writeMapEnd();
}
@@ -2257,9 +2236,9 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
oprot.writeFieldBegin(ORDER_BY_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.orderBy.size()));
- for (SelectionSort _iter111 : struct.orderBy)
+ for (SelectionSort _iter119 : struct.orderBy)
{
- _iter111.write(oprot);
+ _iter119.write(oprot);
}
oprot.writeListEnd();
}
@@ -2365,9 +2344,9 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
if (struct.isSetAggregationsInfo()) {
{
oprot.writeI32(struct.aggregationsInfo.size());
- for (AggregationInfo _iter112 : struct.aggregationsInfo)
+ for (AggregationInfo _iter120 : struct.aggregationsInfo)
{
- _iter112.write(oprot);
+ _iter120.write(oprot);
}
}
}
@@ -2392,20 +2371,20 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
if (struct.isSetDebugOptions()) {
{
oprot.writeI32(struct.debugOptions.size());
- for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter113 : struct.debugOptions.entrySet())
+ for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter121 : struct.debugOptions.entrySet())
{
- oprot.writeString(_iter113.getKey());
- oprot.writeString(_iter113.getValue());
+ oprot.writeString(_iter121.getKey());
+ oprot.writeString(_iter121.getValue());
}
}
}
if (struct.isSetQueryOptions()) {
{
oprot.writeI32(struct.queryOptions.size());
- for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter114 : struct.queryOptions.entrySet())
+ for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter122 : struct.queryOptions.entrySet())
{
- oprot.writeString(_iter114.getKey());
- oprot.writeString(_iter114.getValue());
+ oprot.writeString(_iter122.getKey());
+ oprot.writeString(_iter122.getValue());
}
}
}
@@ -2421,9 +2400,9 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
if (struct.isSetOrderBy()) {
{
oprot.writeI32(struct.orderBy.size());
- for (SelectionSort _iter115 : struct.orderBy)
+ for (SelectionSort _iter123 : struct.orderBy)
{
- _iter115.write(oprot);
+ _iter123.write(oprot);
}
}
}
@@ -2461,14 +2440,14 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
}
if (incoming.get(5)) {
{
- org.apache.thrift.protocol.TList _list116 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
- struct.aggregationsInfo = new java.util.ArrayList<AggregationInfo>(_list116.size);
- @org.apache.thrift.annotation.Nullable AggregationInfo _elem117;
- for (int _i118 = 0; _i118 < _list116.size; ++_i118)
+ org.apache.thrift.protocol.TList _list124 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.aggregationsInfo = new java.util.ArrayList<AggregationInfo>(_list124.size);
+ @org.apache.thrift.annotation.Nullable AggregationInfo _elem125;
+ for (int _i126 = 0; _i126 < _list124.size; ++_i126)
{
- _elem117 = new AggregationInfo();
- _elem117.read(iprot);
- struct.aggregationsInfo.add(_elem117);
+ _elem125 = new AggregationInfo();
+ _elem125.read(iprot);
+ struct.aggregationsInfo.add(_elem125);
}
}
struct.setAggregationsInfoIsSet(true);
@@ -2502,30 +2481,30 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
}
if (incoming.get(12)) {
{
- org.apache.thrift.protocol.TMap _map119 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
- struct.debugOptions = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map119.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _key120;
- @org.apache.thrift.annotation.Nullable java.lang.String _val121;
- for (int _i122 = 0; _i122 < _map119.size; ++_i122)
+ org.apache.thrift.protocol.TMap _map127 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+ struct.debugOptions = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map127.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _key128;
+ @org.apache.thrift.annotation.Nullable java.lang.String _val129;
+ for (int _i130 = 0; _i130 < _map127.size; ++_i130)
{
- _key120 = iprot.readString();
- _val121 = iprot.readString();
- struct.debugOptions.put(_key120, _val121);
+ _key128 = iprot.readString();
+ _val129 = iprot.readString();
+ struct.debugOptions.put(_key128, _val129);
}
}
struct.setDebugOptionsIsSet(true);
}
if (incoming.get(13)) {
{
- org.apache.thrift.protocol.TMap _map123 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
- struct.queryOptions = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map123.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _key124;
- @org.apache.thrift.annotation.Nullable java.lang.String _val125;
- for (int _i126 = 0; _i126 < _map123.size; ++_i126)
+ org.apache.thrift.protocol.TMap _map131 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+ struct.queryOptions = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map131.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _key132;
+ @org.apache.thrift.annotation.Nullable java.lang.String _val133;
+ for (int _i134 = 0; _i134 < _map131.size; ++_i134)
{
- _key124 = iprot.readString();
- _val125 = iprot.readString();
- struct.queryOptions.put(_key124, _val125);
+ _key132 = iprot.readString();
+ _val133 = iprot.readString();
+ struct.queryOptions.put(_key132, _val133);
}
}
struct.setQueryOptionsIsSet(true);
@@ -2547,14 +2526,14 @@ public class BrokerRequest implements org.apache.thrift.TBase<BrokerRequest, Bro
}
if (incoming.get(17)) {
{
- org.apache.thrift.protocol.TList _list127 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
- struct.orderBy = new java.util.ArrayList<SelectionSort>(_list127.size);
- @org.apache.thrift.annotation.Nullable SelectionSort _elem128;
- for (int _i129 = 0; _i129 < _list127.size; ++_i129)
+ org.apache.thrift.protocol.TList _list135 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.orderBy = new java.util.ArrayList<SelectionSort>(_list135.size);
+ @org.apache.thrift.annotation.Nullable SelectionSort _elem136;
+ for (int _i137 = 0; _i137 < _list135.size; ++_i137)
{
- _elem128 = new SelectionSort();
- _elem128.read(iprot);
- struct.orderBy.add(_elem128);
+ _elem136 = new SelectionSort();
+ _elem136.read(iprot);
+ struct.orderBy.add(_elem136);
}
}
struct.setOrderByIsSet(true);
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/DataSource.java b/pinot-common/src/main/java/org/apache/pinot/common/request/DataSource.java
index 9c7cff4..a5c903e 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/DataSource.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/DataSource.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -25,7 +25,7 @@
package org.apache.pinot.common.request;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class DataSource implements org.apache.thrift.TBase<DataSource, DataSource._Fields>, java.io.Serializable, Cloneable, Comparable<DataSource> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DataSource");
@@ -34,7 +34,7 @@ public class DataSource implements org.apache.thrift.TBase<DataSource, DataSourc
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new DataSourceStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new DataSourceTupleSchemeFactory();
- public @org.apache.thrift.annotation.Nullable java.lang.String tableName; // optional
+ private @org.apache.thrift.annotation.Nullable java.lang.String tableName; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -133,9 +133,8 @@ public class DataSource implements org.apache.thrift.TBase<DataSource, DataSourc
return this.tableName;
}
- public DataSource setTableName(@org.apache.thrift.annotation.Nullable java.lang.String tableName) {
+ public void setTableName(@org.apache.thrift.annotation.Nullable java.lang.String tableName) {
this.tableName = tableName;
- return this;
}
public void unsetTableName() {
@@ -332,8 +331,6 @@ public class DataSource implements org.apache.thrift.TBase<DataSource, DataSourc
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/Expression.java b/pinot-common/src/main/java/org/apache/pinot/common/request/Expression.java
index 240b2bb..477c73d 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/Expression.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/Expression.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -25,7 +25,7 @@
package org.apache.pinot.common.request;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class Expression implements org.apache.thrift.TBase<Expression, Expression._Fields>, java.io.Serializable, Cloneable, Comparable<Expression> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Expression");
@@ -37,14 +37,10 @@ public class Expression implements org.apache.thrift.TBase<Expression, Expressio
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new ExpressionStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new ExpressionTupleSchemeFactory();
- /**
- *
- * @see ExpressionType
- */
- public @org.apache.thrift.annotation.Nullable ExpressionType type; // required
- public @org.apache.thrift.annotation.Nullable Function functionCall; // optional
- public @org.apache.thrift.annotation.Nullable Literal literal; // optional
- public @org.apache.thrift.annotation.Nullable Identifier identifier; // optional
+ private @org.apache.thrift.annotation.Nullable ExpressionType type; // required
+ private @org.apache.thrift.annotation.Nullable Function functionCall; // optional
+ private @org.apache.thrift.annotation.Nullable Literal literal; // optional
+ private @org.apache.thrift.annotation.Nullable Identifier identifier; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -189,9 +185,8 @@ public class Expression implements org.apache.thrift.TBase<Expression, Expressio
*
* @see ExpressionType
*/
- public Expression setType(@org.apache.thrift.annotation.Nullable ExpressionType type) {
+ public void setType(@org.apache.thrift.annotation.Nullable ExpressionType type) {
this.type = type;
- return this;
}
public void unsetType() {
@@ -214,9 +209,8 @@ public class Expression implements org.apache.thrift.TBase<Expression, Expressio
return this.functionCall;
}
- public Expression setFunctionCall(@org.apache.thrift.annotation.Nullable Function functionCall) {
+ public void setFunctionCall(@org.apache.thrift.annotation.Nullable Function functionCall) {
this.functionCall = functionCall;
- return this;
}
public void unsetFunctionCall() {
@@ -239,9 +233,8 @@ public class Expression implements org.apache.thrift.TBase<Expression, Expressio
return this.literal;
}
- public Expression setLiteral(@org.apache.thrift.annotation.Nullable Literal literal) {
+ public void setLiteral(@org.apache.thrift.annotation.Nullable Literal literal) {
this.literal = literal;
- return this;
}
public void unsetLiteral() {
@@ -264,9 +257,8 @@ public class Expression implements org.apache.thrift.TBase<Expression, Expressio
return this.identifier;
}
- public Expression setIdentifier(@org.apache.thrift.annotation.Nullable Identifier identifier) {
+ public void setIdentifier(@org.apache.thrift.annotation.Nullable Identifier identifier) {
this.identifier = identifier;
- return this;
}
public void unsetIdentifier() {
@@ -548,9 +540,10 @@ public class Expression implements org.apache.thrift.TBase<Expression, Expressio
public void validate() throws org.apache.thrift.TException {
// check for required fields
- if (type == null) {
- throw new org.apache.thrift.protocol.TProtocolException("Required field 'type' was not present! Struct: " + toString());
+ if (!isSetType()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'type' is unset! Struct:" + toString());
}
+
// check for sub-struct validity
}
@@ -629,8 +622,6 @@ public class Expression implements org.apache.thrift.TBase<Expression, Expressio
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/ExpressionType.java b/pinot-common/src/main/java/org/apache/pinot/common/request/ExpressionType.java
index de06b54..80e0871 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/ExpressionType.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/ExpressionType.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -25,7 +25,7 @@
package org.apache.pinot.common.request;
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public enum ExpressionType implements org.apache.thrift.TEnum {
LITERAL(0),
IDENTIFIER(1),
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/FilterOperator.java b/pinot-common/src/main/java/org/apache/pinot/common/request/FilterOperator.java
index b6ccc9e..fbf177b 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/FilterOperator.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/FilterOperator.java
@@ -28,9 +28,9 @@ package org.apache.pinot.common.request;
/**
* AUTO GENERATED: DO NOT EDIT
* Filter Operator
- *
+ *
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-01-16")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public enum FilterOperator implements org.apache.thrift.TEnum {
AND(0),
OR(1),
@@ -42,7 +42,8 @@ public enum FilterOperator implements org.apache.thrift.TEnum {
IN(7),
IS_NULL(8),
IS_NOT_NULL(9),
- TEXT_MATCH(10);
+ TEXT_MATCH(10),
+ JSON_MATCH(11);
private final int value;
@@ -62,7 +63,7 @@ public enum FilterOperator implements org.apache.thrift.TEnum {
* @return null if the value is not found.
*/
@org.apache.thrift.annotation.Nullable
- public static FilterOperator findByValue(int value) {
+ public static FilterOperator findByValue(int value) {
switch (value) {
case 0:
return AND;
@@ -86,6 +87,8 @@ public enum FilterOperator implements org.apache.thrift.TEnum {
return IS_NOT_NULL;
case 10:
return TEXT_MATCH;
+ case 11:
+ return JSON_MATCH;
default:
return null;
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/FilterQuery.java b/pinot-common/src/main/java/org/apache/pinot/common/request/FilterQuery.java
index 62c1e72..2261850 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/FilterQuery.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/FilterQuery.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -30,7 +30,7 @@ package org.apache.pinot.common.request;
* Filter query
*
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class FilterQuery implements org.apache.thrift.TBase<FilterQuery, FilterQuery._Fields>, java.io.Serializable, Cloneable, Comparable<FilterQuery> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("FilterQuery");
@@ -43,18 +43,11 @@ public class FilterQuery implements org.apache.thrift.TBase<FilterQuery, FilterQ
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new FilterQueryStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new FilterQueryTupleSchemeFactory();
- public int id; // required
- /**
- * This should be unique within a single request *
- */
- public @org.apache.thrift.annotation.Nullable java.lang.String column; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> value; // required
- /**
- *
- * @see FilterOperator
- */
- public @org.apache.thrift.annotation.Nullable FilterOperator operator; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<java.lang.Integer> nestedFilterQueryIds; // required
+ private int id; // required
+ private @org.apache.thrift.annotation.Nullable java.lang.String column; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> value; // required
+ private @org.apache.thrift.annotation.Nullable FilterOperator operator; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.Integer> nestedFilterQueryIds; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -213,10 +206,9 @@ public class FilterQuery implements org.apache.thrift.TBase<FilterQuery, FilterQ
return this.id;
}
- public FilterQuery setId(int id) {
+ public void setId(int id) {
this.id = id;
setIdIsSet(true);
- return this;
}
public void unsetId() {
@@ -243,9 +235,8 @@ public class FilterQuery implements org.apache.thrift.TBase<FilterQuery, FilterQ
/**
* This should be unique within a single request *
*/
- public FilterQuery setColumn(@org.apache.thrift.annotation.Nullable java.lang.String column) {
+ public void setColumn(@org.apache.thrift.annotation.Nullable java.lang.String column) {
this.column = column;
- return this;
}
public void unsetColumn() {
@@ -284,9 +275,8 @@ public class FilterQuery implements org.apache.thrift.TBase<FilterQuery, FilterQ
return this.value;
}
- public FilterQuery setValue(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> value) {
+ public void setValue(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> value) {
this.value = value;
- return this;
}
public void unsetValue() {
@@ -317,9 +307,8 @@ public class FilterQuery implements org.apache.thrift.TBase<FilterQuery, FilterQ
*
* @see FilterOperator
*/
- public FilterQuery setOperator(@org.apache.thrift.annotation.Nullable FilterOperator operator) {
+ public void setOperator(@org.apache.thrift.annotation.Nullable FilterOperator operator) {
this.operator = operator;
- return this;
}
public void unsetOperator() {
@@ -358,9 +347,8 @@ public class FilterQuery implements org.apache.thrift.TBase<FilterQuery, FilterQ
return this.nestedFilterQueryIds;
}
- public FilterQuery setNestedFilterQueryIds(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.Integer> nestedFilterQueryIds) {
+ public void setNestedFilterQueryIds(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.Integer> nestedFilterQueryIds) {
this.nestedFilterQueryIds = nestedFilterQueryIds;
- return this;
}
public void unsetNestedFilterQueryIds() {
@@ -678,7 +666,10 @@ public class FilterQuery implements org.apache.thrift.TBase<FilterQuery, FilterQ
public void validate() throws org.apache.thrift.TException {
// check for required fields
- // alas, we cannot check 'id' because it's a primitive and you chose the non-beans generator.
+ if (!isSetId()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'id' is unset! Struct:" + toString());
+ }
+
// check for sub-struct validity
}
@@ -784,11 +775,6 @@ public class FilterQuery implements org.apache.thrift.TBase<FilterQuery, FilterQ
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
- if (!struct.isSetId()) {
- throw new org.apache.thrift.protocol.TProtocolException("Required field 'id' was not found in serialized data! Struct: " + toString());
- }
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/FilterQueryMap.java b/pinot-common/src/main/java/org/apache/pinot/common/request/FilterQueryMap.java
index fa9902b..fcafcbc 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/FilterQueryMap.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/FilterQueryMap.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -30,7 +30,7 @@ package org.apache.pinot.common.request;
* Filter Query is nested but thrift stable version does not support yet (The support is there in top of the trunk but no released jars. Two concerns : stability and onus of maintaining a stable point. Also, its pretty difficult to compile thrift in Linkedin software development environment which is not geared towards c++ dev. Hence, the )
*
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class FilterQueryMap implements org.apache.thrift.TBase<FilterQueryMap, FilterQueryMap._Fields>, java.io.Serializable, Cloneable, Comparable<FilterQueryMap> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("FilterQueryMap");
@@ -39,7 +39,7 @@ public class FilterQueryMap implements org.apache.thrift.TBase<FilterQueryMap, F
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new FilterQueryMapStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new FilterQueryMapTupleSchemeFactory();
- public @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.Integer,FilterQuery> filterQueryMap; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.Integer,FilterQuery> filterQueryMap; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -163,9 +163,8 @@ public class FilterQueryMap implements org.apache.thrift.TBase<FilterQueryMap, F
return this.filterQueryMap;
}
- public FilterQueryMap setFilterQueryMap(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.Integer,FilterQuery> filterQueryMap) {
+ public void setFilterQueryMap(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.Integer,FilterQuery> filterQueryMap) {
this.filterQueryMap = filterQueryMap;
- return this;
}
public void unsetFilterQueryMap() {
@@ -375,8 +374,6 @@ public class FilterQueryMap implements org.apache.thrift.TBase<FilterQueryMap, F
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/Function.java b/pinot-common/src/main/java/org/apache/pinot/common/request/Function.java
index 300dd3e..5010c53 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/Function.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/Function.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -25,7 +25,7 @@
package org.apache.pinot.common.request;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class Function implements org.apache.thrift.TBase<Function, Function._Fields>, java.io.Serializable, Cloneable, Comparable<Function> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Function");
@@ -35,8 +35,8 @@ public class Function implements org.apache.thrift.TBase<Function, Function._Fie
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new FunctionStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new FunctionTupleSchemeFactory();
- public @org.apache.thrift.annotation.Nullable java.lang.String operator; // required
- public @org.apache.thrift.annotation.Nullable java.util.List<Expression> operands; // optional
+ private @org.apache.thrift.annotation.Nullable java.lang.String operator; // required
+ private @org.apache.thrift.annotation.Nullable java.util.List<Expression> operands; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -156,9 +156,8 @@ public class Function implements org.apache.thrift.TBase<Function, Function._Fie
return this.operator;
}
- public Function setOperator(@org.apache.thrift.annotation.Nullable java.lang.String operator) {
+ public void setOperator(@org.apache.thrift.annotation.Nullable java.lang.String operator) {
this.operator = operator;
- return this;
}
public void unsetOperator() {
@@ -197,9 +196,8 @@ public class Function implements org.apache.thrift.TBase<Function, Function._Fie
return this.operands;
}
- public Function setOperands(@org.apache.thrift.annotation.Nullable java.util.List<Expression> operands) {
+ public void setOperands(@org.apache.thrift.annotation.Nullable java.util.List<Expression> operands) {
this.operands = operands;
- return this;
}
public void unsetOperands() {
@@ -389,9 +387,10 @@ public class Function implements org.apache.thrift.TBase<Function, Function._Fie
public void validate() throws org.apache.thrift.TException {
// check for required fields
- if (operator == null) {
- throw new org.apache.thrift.protocol.TProtocolException("Required field 'operator' was not present! Struct: " + toString());
+ if (!isSetOperator()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'operator' is unset! Struct:" + toString());
}
+
// check for sub-struct validity
}
@@ -462,8 +461,6 @@ public class Function implements org.apache.thrift.TBase<Function, Function._Fie
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/GroupBy.java b/pinot-common/src/main/java/org/apache/pinot/common/request/GroupBy.java
index f055b11..08f255d 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/GroupBy.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/GroupBy.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -30,7 +30,7 @@ package org.apache.pinot.common.request;
* GroupBy
*
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields>, java.io.Serializable, Cloneable, Comparable<GroupBy> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GroupBy");
@@ -41,9 +41,9 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new GroupByStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new GroupByTupleSchemeFactory();
- public @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> columns; // optional
- public long topN; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> expressions; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> columns; // optional
+ private long topN; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> expressions; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -182,9 +182,8 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
return this.columns;
}
- public GroupBy setColumns(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> columns) {
+ public void setColumns(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> columns) {
this.columns = columns;
- return this;
}
public void unsetColumns() {
@@ -206,10 +205,9 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
return this.topN;
}
- public GroupBy setTopN(long topN) {
+ public void setTopN(long topN) {
this.topN = topN;
setTopNIsSet(true);
- return this;
}
public void unsetTopN() {
@@ -246,9 +244,8 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
return this.expressions;
}
- public GroupBy setExpressions(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> expressions) {
+ public void setExpressions(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> expressions) {
this.expressions = expressions;
- return this;
}
public void unsetExpressions() {
@@ -524,13 +521,13 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
case 1: // COLUMNS
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
- org.apache.thrift.protocol.TList _list62 = iprot.readListBegin();
- struct.columns = new java.util.ArrayList<java.lang.String>(_list62.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _elem63;
- for (int _i64 = 0; _i64 < _list62.size; ++_i64)
+ org.apache.thrift.protocol.TList _list70 = iprot.readListBegin();
+ struct.columns = new java.util.ArrayList<java.lang.String>(_list70.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _elem71;
+ for (int _i72 = 0; _i72 < _list70.size; ++_i72)
{
- _elem63 = iprot.readString();
- struct.columns.add(_elem63);
+ _elem71 = iprot.readString();
+ struct.columns.add(_elem71);
}
iprot.readListEnd();
}
@@ -550,13 +547,13 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
case 3: // EXPRESSIONS
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
- org.apache.thrift.protocol.TList _list65 = iprot.readListBegin();
- struct.expressions = new java.util.ArrayList<java.lang.String>(_list65.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _elem66;
- for (int _i67 = 0; _i67 < _list65.size; ++_i67)
+ org.apache.thrift.protocol.TList _list73 = iprot.readListBegin();
+ struct.expressions = new java.util.ArrayList<java.lang.String>(_list73.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _elem74;
+ for (int _i75 = 0; _i75 < _list73.size; ++_i75)
{
- _elem66 = iprot.readString();
- struct.expressions.add(_elem66);
+ _elem74 = iprot.readString();
+ struct.expressions.add(_elem74);
}
iprot.readListEnd();
}
@@ -571,8 +568,6 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
@@ -585,9 +580,9 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
oprot.writeFieldBegin(COLUMNS_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.columns.size()));
- for (java.lang.String _iter68 : struct.columns)
+ for (java.lang.String _iter76 : struct.columns)
{
- oprot.writeString(_iter68);
+ oprot.writeString(_iter76);
}
oprot.writeListEnd();
}
@@ -604,9 +599,9 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
oprot.writeFieldBegin(EXPRESSIONS_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.expressions.size()));
- for (java.lang.String _iter69 : struct.expressions)
+ for (java.lang.String _iter77 : struct.expressions)
{
- oprot.writeString(_iter69);
+ oprot.writeString(_iter77);
}
oprot.writeListEnd();
}
@@ -644,9 +639,9 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
if (struct.isSetColumns()) {
{
oprot.writeI32(struct.columns.size());
- for (java.lang.String _iter70 : struct.columns)
+ for (java.lang.String _iter78 : struct.columns)
{
- oprot.writeString(_iter70);
+ oprot.writeString(_iter78);
}
}
}
@@ -656,9 +651,9 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
if (struct.isSetExpressions()) {
{
oprot.writeI32(struct.expressions.size());
- for (java.lang.String _iter71 : struct.expressions)
+ for (java.lang.String _iter79 : struct.expressions)
{
- oprot.writeString(_iter71);
+ oprot.writeString(_iter79);
}
}
}
@@ -670,13 +665,13 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
java.util.BitSet incoming = iprot.readBitSet(3);
if (incoming.get(0)) {
{
- org.apache.thrift.protocol.TList _list72 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
- struct.columns = new java.util.ArrayList<java.lang.String>(_list72.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _elem73;
- for (int _i74 = 0; _i74 < _list72.size; ++_i74)
+ org.apache.thrift.protocol.TList _list80 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+ struct.columns = new java.util.ArrayList<java.lang.String>(_list80.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _elem81;
+ for (int _i82 = 0; _i82 < _list80.size; ++_i82)
{
- _elem73 = iprot.readString();
- struct.columns.add(_elem73);
+ _elem81 = iprot.readString();
+ struct.columns.add(_elem81);
}
}
struct.setColumnsIsSet(true);
@@ -687,13 +682,13 @@ public class GroupBy implements org.apache.thrift.TBase<GroupBy, GroupBy._Fields
}
if (incoming.get(2)) {
{
- org.apache.thrift.protocol.TList _list75 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
- struct.expressions = new java.util.ArrayList<java.lang.String>(_list75.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _elem76;
- for (int _i77 = 0; _i77 < _list75.size; ++_i77)
+ org.apache.thrift.protocol.TList _list83 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+ struct.expressions = new java.util.ArrayList<java.lang.String>(_list83.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _elem84;
+ for (int _i85 = 0; _i85 < _list83.size; ++_i85)
{
- _elem76 = iprot.readString();
- struct.expressions.add(_elem76);
+ _elem84 = iprot.readString();
+ struct.expressions.add(_elem84);
}
}
struct.setExpressionsIsSet(true);
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/HavingFilterQuery.java b/pinot-common/src/main/java/org/apache/pinot/common/request/HavingFilterQuery.java
index f2cda61..29aa39f 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/HavingFilterQuery.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/HavingFilterQuery.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -30,7 +30,7 @@ package org.apache.pinot.common.request;
* Having Filter query
*
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class HavingFilterQuery implements org.apache.thrift.TBase<HavingFilterQuery, HavingFilterQuery._Fields>, java.io.Serializable, Cloneable, Comparable<HavingFilterQuery> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HavingFilterQuery");
@@ -43,18 +43,11 @@ public class HavingFilterQuery implements org.apache.thrift.TBase<HavingFilterQu
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new HavingFilterQueryStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new HavingFilterQueryTupleSchemeFactory();
- public int id; // required
- /**
- * This should be unique within a single request *
- */
- public @org.apache.thrift.annotation.Nullable AggregationInfo aggregationInfo; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> value; // required
- /**
- *
- * @see FilterOperator
- */
- public @org.apache.thrift.annotation.Nullable FilterOperator operator; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<java.lang.Integer> nestedFilterQueryIds; // required
+ private int id; // required
+ private @org.apache.thrift.annotation.Nullable AggregationInfo aggregationInfo; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> value; // required
+ private @org.apache.thrift.annotation.Nullable FilterOperator operator; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.Integer> nestedFilterQueryIds; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -213,10 +206,9 @@ public class HavingFilterQuery implements org.apache.thrift.TBase<HavingFilterQu
return this.id;
}
- public HavingFilterQuery setId(int id) {
+ public void setId(int id) {
this.id = id;
setIdIsSet(true);
- return this;
}
public void unsetId() {
@@ -243,9 +235,8 @@ public class HavingFilterQuery implements org.apache.thrift.TBase<HavingFilterQu
/**
* This should be unique within a single request *
*/
- public HavingFilterQuery setAggregationInfo(@org.apache.thrift.annotation.Nullable AggregationInfo aggregationInfo) {
+ public void setAggregationInfo(@org.apache.thrift.annotation.Nullable AggregationInfo aggregationInfo) {
this.aggregationInfo = aggregationInfo;
- return this;
}
public void unsetAggregationInfo() {
@@ -284,9 +275,8 @@ public class HavingFilterQuery implements org.apache.thrift.TBase<HavingFilterQu
return this.value;
}
- public HavingFilterQuery setValue(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> value) {
+ public void setValue(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> value) {
this.value = value;
- return this;
}
public void unsetValue() {
@@ -317,9 +307,8 @@ public class HavingFilterQuery implements org.apache.thrift.TBase<HavingFilterQu
*
* @see FilterOperator
*/
- public HavingFilterQuery setOperator(@org.apache.thrift.annotation.Nullable FilterOperator operator) {
+ public void setOperator(@org.apache.thrift.annotation.Nullable FilterOperator operator) {
this.operator = operator;
- return this;
}
public void unsetOperator() {
@@ -358,9 +347,8 @@ public class HavingFilterQuery implements org.apache.thrift.TBase<HavingFilterQu
return this.nestedFilterQueryIds;
}
- public HavingFilterQuery setNestedFilterQueryIds(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.Integer> nestedFilterQueryIds) {
+ public void setNestedFilterQueryIds(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.Integer> nestedFilterQueryIds) {
this.nestedFilterQueryIds = nestedFilterQueryIds;
- return this;
}
public void unsetNestedFilterQueryIds() {
@@ -678,7 +666,10 @@ public class HavingFilterQuery implements org.apache.thrift.TBase<HavingFilterQu
public void validate() throws org.apache.thrift.TException {
// check for required fields
- // alas, we cannot check 'id' because it's a primitive and you chose the non-beans generator.
+ if (!isSetId()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'id' is unset! Struct:" + toString());
+ }
+
// check for sub-struct validity
}
@@ -785,11 +776,6 @@ public class HavingFilterQuery implements org.apache.thrift.TBase<HavingFilterQu
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
- if (!struct.isSetId()) {
- throw new org.apache.thrift.protocol.TProtocolException("Required field 'id' was not found in serialized data! Struct: " + toString());
- }
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/HavingFilterQueryMap.java b/pinot-common/src/main/java/org/apache/pinot/common/request/HavingFilterQueryMap.java
index 4dbb6f9..67af8d9 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/HavingFilterQueryMap.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/HavingFilterQueryMap.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -25,7 +25,7 @@
package org.apache.pinot.common.request;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class HavingFilterQueryMap implements org.apache.thrift.TBase<HavingFilterQueryMap, HavingFilterQueryMap._Fields>, java.io.Serializable, Cloneable, Comparable<HavingFilterQueryMap> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HavingFilterQueryMap");
@@ -34,7 +34,7 @@ public class HavingFilterQueryMap implements org.apache.thrift.TBase<HavingFilte
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new HavingFilterQueryMapStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new HavingFilterQueryMapTupleSchemeFactory();
- public @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.Integer,HavingFilterQuery> filterQueryMap; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.Integer,HavingFilterQuery> filterQueryMap; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -158,9 +158,8 @@ public class HavingFilterQueryMap implements org.apache.thrift.TBase<HavingFilte
return this.filterQueryMap;
}
- public HavingFilterQueryMap setFilterQueryMap(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.Integer,HavingFilterQuery> filterQueryMap) {
+ public void setFilterQueryMap(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.Integer,HavingFilterQuery> filterQueryMap) {
this.filterQueryMap = filterQueryMap;
- return this;
}
public void unsetFilterQueryMap() {
@@ -370,8 +369,6 @@ public class HavingFilterQueryMap implements org.apache.thrift.TBase<HavingFilte
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/Identifier.java b/pinot-common/src/main/java/org/apache/pinot/common/request/Identifier.java
index 6ae7ab2..9fbb9b5 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/Identifier.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/Identifier.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -25,7 +25,7 @@
package org.apache.pinot.common.request;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class Identifier implements org.apache.thrift.TBase<Identifier, Identifier._Fields>, java.io.Serializable, Cloneable, Comparable<Identifier> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Identifier");
@@ -34,7 +34,7 @@ public class Identifier implements org.apache.thrift.TBase<Identifier, Identifie
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new IdentifierStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new IdentifierTupleSchemeFactory();
- public @org.apache.thrift.annotation.Nullable java.lang.String name; // required
+ private @org.apache.thrift.annotation.Nullable java.lang.String name; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -139,9 +139,8 @@ public class Identifier implements org.apache.thrift.TBase<Identifier, Identifie
return this.name;
}
- public Identifier setName(@org.apache.thrift.annotation.Nullable java.lang.String name) {
+ public void setName(@org.apache.thrift.annotation.Nullable java.lang.String name) {
this.name = name;
- return this;
}
public void unsetName() {
@@ -285,9 +284,10 @@ public class Identifier implements org.apache.thrift.TBase<Identifier, Identifie
public void validate() throws org.apache.thrift.TException {
// check for required fields
- if (name == null) {
- throw new org.apache.thrift.protocol.TProtocolException("Required field 'name' was not present! Struct: " + toString());
+ if (!isSetName()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'name' is unset! Struct:" + toString());
}
+
// check for sub-struct validity
}
@@ -339,8 +339,6 @@ public class Identifier implements org.apache.thrift.TBase<Identifier, Identifie
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/InstanceRequest.java b/pinot-common/src/main/java/org/apache/pinot/common/request/InstanceRequest.java
index 534b6bb..202cfdf 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/InstanceRequest.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/InstanceRequest.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -30,7 +30,7 @@ package org.apache.pinot.common.request;
* Instance Request
*
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest, InstanceRequest._Fields>, java.io.Serializable, Cloneable, Comparable<InstanceRequest> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InstanceRequest");
@@ -43,11 +43,11 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new InstanceRequestStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new InstanceRequestTupleSchemeFactory();
- public long requestId; // required
- public @org.apache.thrift.annotation.Nullable BrokerRequest query; // required
- public @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> searchSegments; // optional
- public boolean enableTrace; // optional
- public @org.apache.thrift.annotation.Nullable java.lang.String brokerId; // optional
+ private long requestId; // required
+ private @org.apache.thrift.annotation.Nullable BrokerRequest query; // required
+ private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> searchSegments; // optional
+ private boolean enableTrace; // optional
+ private @org.apache.thrift.annotation.Nullable java.lang.String brokerId; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -195,10 +195,9 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
return this.requestId;
}
- public InstanceRequest setRequestId(long requestId) {
+ public void setRequestId(long requestId) {
this.requestId = requestId;
setRequestIdIsSet(true);
- return this;
}
public void unsetRequestId() {
@@ -219,9 +218,8 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
return this.query;
}
- public InstanceRequest setQuery(@org.apache.thrift.annotation.Nullable BrokerRequest query) {
+ public void setQuery(@org.apache.thrift.annotation.Nullable BrokerRequest query) {
this.query = query;
- return this;
}
public void unsetQuery() {
@@ -260,9 +258,8 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
return this.searchSegments;
}
- public InstanceRequest setSearchSegments(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> searchSegments) {
+ public void setSearchSegments(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> searchSegments) {
this.searchSegments = searchSegments;
- return this;
}
public void unsetSearchSegments() {
@@ -284,10 +281,9 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
return this.enableTrace;
}
- public InstanceRequest setEnableTrace(boolean enableTrace) {
+ public void setEnableTrace(boolean enableTrace) {
this.enableTrace = enableTrace;
setEnableTraceIsSet(true);
- return this;
}
public void unsetEnableTrace() {
@@ -308,9 +304,8 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
return this.brokerId;
}
- public InstanceRequest setBrokerId(@org.apache.thrift.annotation.Nullable java.lang.String brokerId) {
+ public void setBrokerId(@org.apache.thrift.annotation.Nullable java.lang.String brokerId) {
this.brokerId = brokerId;
- return this;
}
public void unsetBrokerId() {
@@ -626,10 +621,14 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
public void validate() throws org.apache.thrift.TException {
// check for required fields
- // alas, we cannot check 'requestId' because it's a primitive and you chose the non-beans generator.
- if (query == null) {
- throw new org.apache.thrift.protocol.TProtocolException("Required field 'query' was not present! Struct: " + toString());
+ if (!isSetRequestId()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'requestId' is unset! Struct:" + toString());
}
+
+ if (!isSetQuery()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'query' is unset! Struct:" + toString());
+ }
+
// check for sub-struct validity
if (query != null) {
query.validate();
@@ -692,13 +691,13 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
case 3: // SEARCH_SEGMENTS
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
- org.apache.thrift.protocol.TList _list130 = iprot.readListBegin();
- struct.searchSegments = new java.util.ArrayList<java.lang.String>(_list130.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _elem131;
- for (int _i132 = 0; _i132 < _list130.size; ++_i132)
+ org.apache.thrift.protocol.TList _list138 = iprot.readListBegin();
+ struct.searchSegments = new java.util.ArrayList<java.lang.String>(_list138.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _elem139;
+ for (int _i140 = 0; _i140 < _list138.size; ++_i140)
{
- _elem131 = iprot.readString();
- struct.searchSegments.add(_elem131);
+ _elem139 = iprot.readString();
+ struct.searchSegments.add(_elem139);
}
iprot.readListEnd();
}
@@ -729,11 +728,6 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
- if (!struct.isSetRequestId()) {
- throw new org.apache.thrift.protocol.TProtocolException("Required field 'requestId' was not found in serialized data! Struct: " + toString());
- }
struct.validate();
}
@@ -754,9 +748,9 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
oprot.writeFieldBegin(SEARCH_SEGMENTS_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.searchSegments.size()));
- for (java.lang.String _iter133 : struct.searchSegments)
+ for (java.lang.String _iter141 : struct.searchSegments)
{
- oprot.writeString(_iter133);
+ oprot.writeString(_iter141);
}
oprot.writeListEnd();
}
@@ -808,9 +802,9 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
if (struct.isSetSearchSegments()) {
{
oprot.writeI32(struct.searchSegments.size());
- for (java.lang.String _iter134 : struct.searchSegments)
+ for (java.lang.String _iter142 : struct.searchSegments)
{
- oprot.writeString(_iter134);
+ oprot.writeString(_iter142);
}
}
}
@@ -833,13 +827,13 @@ public class InstanceRequest implements org.apache.thrift.TBase<InstanceRequest,
java.util.BitSet incoming = iprot.readBitSet(3);
if (incoming.get(0)) {
{
- org.apache.thrift.protocol.TList _list135 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
- struct.searchSegments = new java.util.ArrayList<java.lang.String>(_list135.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _elem136;
- for (int _i137 = 0; _i137 < _list135.size; ++_i137)
+ org.apache.thrift.protocol.TList _list143 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+ struct.searchSegments = new java.util.ArrayList<java.lang.String>(_list143.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _elem144;
+ for (int _i145 = 0; _i145 < _list143.size; ++_i145)
{
- _elem136 = iprot.readString();
- struct.searchSegments.add(_elem136);
+ _elem144 = iprot.readString();
+ struct.searchSegments.add(_elem144);
}
}
struct.setSearchSegmentsIsSet(true);
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/Literal.java b/pinot-common/src/main/java/org/apache/pinot/common/request/Literal.java
index 7761eb4..5d25563 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/Literal.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/Literal.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -25,7 +25,7 @@
package org.apache.pinot.common.request;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class Literal extends org.apache.thrift.TUnion<Literal, Literal._Fields> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Literal");
private static final org.apache.thrift.protocol.TField BOOL_VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("boolValue", org.apache.thrift.protocol.TType.BOOL, (short)1);
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/PinotQuery.java b/pinot-common/src/main/java/org/apache/pinot/common/request/PinotQuery.java
index 10f1242..42cc48d 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/PinotQuery.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/PinotQuery.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -25,7 +25,7 @@
package org.apache.pinot.common.request;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuery._Fields>, java.io.Serializable, Cloneable, Comparable<PinotQuery> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PinotQuery");
@@ -44,17 +44,17 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new PinotQueryStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new PinotQueryTupleSchemeFactory();
- public int version; // optional
- public @org.apache.thrift.annotation.Nullable DataSource dataSource; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<Expression> selectList; // optional
- public @org.apache.thrift.annotation.Nullable Expression filterExpression; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<Expression> groupByList; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<Expression> orderByList; // optional
- public @org.apache.thrift.annotation.Nullable Expression havingExpression; // optional
- public int limit; // optional
- public int offset; // optional
- public @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> debugOptions; // optional
- public @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> queryOptions; // optional
+ private int version; // optional
+ private @org.apache.thrift.annotation.Nullable DataSource dataSource; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<Expression> selectList; // optional
+ private @org.apache.thrift.annotation.Nullable Expression filterExpression; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<Expression> groupByList; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<Expression> orderByList; // optional
+ private @org.apache.thrift.annotation.Nullable Expression havingExpression; // optional
+ private int limit; // optional
+ private int offset; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> debugOptions; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> queryOptions; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -269,10 +269,9 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
return this.version;
}
- public PinotQuery setVersion(int version) {
+ public void setVersion(int version) {
this.version = version;
setVersionIsSet(true);
- return this;
}
public void unsetVersion() {
@@ -293,9 +292,8 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
return this.dataSource;
}
- public PinotQuery setDataSource(@org.apache.thrift.annotation.Nullable DataSource dataSource) {
+ public void setDataSource(@org.apache.thrift.annotation.Nullable DataSource dataSource) {
this.dataSource = dataSource;
- return this;
}
public void unsetDataSource() {
@@ -334,9 +332,8 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
return this.selectList;
}
- public PinotQuery setSelectList(@org.apache.thrift.annotation.Nullable java.util.List<Expression> selectList) {
+ public void setSelectList(@org.apache.thrift.annotation.Nullable java.util.List<Expression> selectList) {
this.selectList = selectList;
- return this;
}
public void unsetSelectList() {
@@ -359,9 +356,8 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
return this.filterExpression;
}
- public PinotQuery setFilterExpression(@org.apache.thrift.annotation.Nullable Expression filterExpression) {
+ public void setFilterExpression(@org.apache.thrift.annotation.Nullable Expression filterExpression) {
this.filterExpression = filterExpression;
- return this;
}
public void unsetFilterExpression() {
@@ -400,9 +396,8 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
return this.groupByList;
}
- public PinotQuery setGroupByList(@org.apache.thrift.annotation.Nullable java.util.List<Expression> groupByList) {
+ public void setGroupByList(@org.apache.thrift.annotation.Nullable java.util.List<Expression> groupByList) {
this.groupByList = groupByList;
- return this;
}
public void unsetGroupByList() {
@@ -441,9 +436,8 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
return this.orderByList;
}
- public PinotQuery setOrderByList(@org.apache.thrift.annotation.Nullable java.util.List<Expression> orderByList) {
+ public void setOrderByList(@org.apache.thrift.annotation.Nullable java.util.List<Expression> orderByList) {
this.orderByList = orderByList;
- return this;
}
public void unsetOrderByList() {
@@ -466,9 +460,8 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
return this.havingExpression;
}
- public PinotQuery setHavingExpression(@org.apache.thrift.annotation.Nullable Expression havingExpression) {
+ public void setHavingExpression(@org.apache.thrift.annotation.Nullable Expression havingExpression) {
this.havingExpression = havingExpression;
- return this;
}
public void unsetHavingExpression() {
@@ -490,10 +483,9 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
return this.limit;
}
- public PinotQuery setLimit(int limit) {
+ public void setLimit(int limit) {
this.limit = limit;
setLimitIsSet(true);
- return this;
}
public void unsetLimit() {
@@ -513,10 +505,9 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
return this.offset;
}
- public PinotQuery setOffset(int offset) {
+ public void setOffset(int offset) {
this.offset = offset;
setOffsetIsSet(true);
- return this;
}
public void unsetOffset() {
@@ -548,9 +539,8 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
return this.debugOptions;
}
- public PinotQuery setDebugOptions(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> debugOptions) {
+ public void setDebugOptions(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> debugOptions) {
this.debugOptions = debugOptions;
- return this;
}
public void unsetDebugOptions() {
@@ -584,9 +574,8 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
return this.queryOptions;
}
- public PinotQuery setQueryOptions(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> queryOptions) {
+ public void setQueryOptions(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> queryOptions) {
this.queryOptions = queryOptions;
- return this;
}
public void unsetQueryOptions() {
@@ -1376,8 +1365,6 @@ public class PinotQuery implements org.apache.thrift.TBase<PinotQuery, PinotQuer
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/QuerySource.java b/pinot-common/src/main/java/org/apache/pinot/common/request/QuerySource.java
index 2ca4d62..49ca8ae 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/QuerySource.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/QuerySource.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -30,7 +30,7 @@ package org.apache.pinot.common.request;
* Query source
*
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class QuerySource implements org.apache.thrift.TBase<QuerySource, QuerySource._Fields>, java.io.Serializable, Cloneable, Comparable<QuerySource> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("QuerySource");
@@ -39,7 +39,7 @@ public class QuerySource implements org.apache.thrift.TBase<QuerySource, QuerySo
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new QuerySourceStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new QuerySourceTupleSchemeFactory();
- public @org.apache.thrift.annotation.Nullable java.lang.String tableName; // optional
+ private @org.apache.thrift.annotation.Nullable java.lang.String tableName; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -138,9 +138,8 @@ public class QuerySource implements org.apache.thrift.TBase<QuerySource, QuerySo
return this.tableName;
}
- public QuerySource setTableName(@org.apache.thrift.annotation.Nullable java.lang.String tableName) {
+ public void setTableName(@org.apache.thrift.annotation.Nullable java.lang.String tableName) {
this.tableName = tableName;
- return this;
}
public void unsetTableName() {
@@ -337,8 +336,6 @@ public class QuerySource implements org.apache.thrift.TBase<QuerySource, QuerySo
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/QueryType.java b/pinot-common/src/main/java/org/apache/pinot/common/request/QueryType.java
index d7acfac..5a5ecfd 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/QueryType.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/QueryType.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -30,7 +30,7 @@ package org.apache.pinot.common.request;
* Query type
*
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class QueryType implements org.apache.thrift.TBase<QueryType, QueryType._Fields>, java.io.Serializable, Cloneable, Comparable<QueryType> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("QueryType");
@@ -43,11 +43,11 @@ public class QueryType implements org.apache.thrift.TBase<QueryType, QueryType._
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new QueryTypeStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new QueryTypeTupleSchemeFactory();
- public boolean hasSelection; // optional
- public boolean hasFilter; // optional
- public boolean hasAggregation; // optional
- public boolean hasGroup_by; // optional
- public boolean hasHaving; // optional
+ private boolean hasSelection; // optional
+ private boolean hasFilter; // optional
+ private boolean hasAggregation; // optional
+ private boolean hasGroup_by; // optional
+ private boolean hasHaving; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -183,10 +183,9 @@ public class QueryType implements org.apache.thrift.TBase<QueryType, QueryType._
return this.hasSelection;
}
- public QueryType setHasSelection(boolean hasSelection) {
+ public void setHasSelection(boolean hasSelection) {
this.hasSelection = hasSelection;
setHasSelectionIsSet(true);
- return this;
}
public void unsetHasSelection() {
@@ -206,10 +205,9 @@ public class QueryType implements org.apache.thrift.TBase<QueryType, QueryType._
return this.hasFilter;
}
- public QueryType setHasFilter(boolean hasFilter) {
+ public void setHasFilter(boolean hasFilter) {
this.hasFilter = hasFilter;
setHasFilterIsSet(true);
- return this;
}
public void unsetHasFilter() {
@@ -229,10 +227,9 @@ public class QueryType implements org.apache.thrift.TBase<QueryType, QueryType._
return this.hasAggregation;
}
- public QueryType setHasAggregation(boolean hasAggregation) {
+ public void setHasAggregation(boolean hasAggregation) {
this.hasAggregation = hasAggregation;
setHasAggregationIsSet(true);
- return this;
}
public void unsetHasAggregation() {
@@ -252,10 +249,9 @@ public class QueryType implements org.apache.thrift.TBase<QueryType, QueryType._
return this.hasGroup_by;
}
- public QueryType setHasGroup_by(boolean hasGroup_by) {
+ public void setHasGroup_by(boolean hasGroup_by) {
this.hasGroup_by = hasGroup_by;
setHasGroup_byIsSet(true);
- return this;
}
public void unsetHasGroup_by() {
@@ -275,10 +271,9 @@ public class QueryType implements org.apache.thrift.TBase<QueryType, QueryType._
return this.hasHaving;
}
- public QueryType setHasHaving(boolean hasHaving) {
+ public void setHasHaving(boolean hasHaving) {
this.hasHaving = hasHaving;
setHasHavingIsSet(true);
- return this;
}
public void unsetHasHaving() {
@@ -671,8 +666,6 @@ public class QueryType implements org.apache.thrift.TBase<QueryType, QueryType._
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/Selection.java b/pinot-common/src/main/java/org/apache/pinot/common/request/Selection.java
index c607038..05b89b5 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/Selection.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/Selection.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -30,7 +30,7 @@ package org.apache.pinot.common.request;
* Selection
*
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class Selection implements org.apache.thrift.TBase<Selection, Selection._Fields>, java.io.Serializable, Cloneable, Comparable<Selection> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Selection");
@@ -42,10 +42,10 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new SelectionStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new SelectionTupleSchemeFactory();
- public @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> selectionColumns; // optional
- public @org.apache.thrift.annotation.Nullable java.util.List<SelectionSort> selectionSortSequence; // optional
- public int offset; // optional
- public int size; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> selectionColumns; // optional
+ private @org.apache.thrift.annotation.Nullable java.util.List<SelectionSort> selectionSortSequence; // optional
+ private int offset; // optional
+ private int size; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -200,9 +200,8 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
return this.selectionColumns;
}
- public Selection setSelectionColumns(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> selectionColumns) {
+ public void setSelectionColumns(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> selectionColumns) {
this.selectionColumns = selectionColumns;
- return this;
}
public void unsetSelectionColumns() {
@@ -241,9 +240,8 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
return this.selectionSortSequence;
}
- public Selection setSelectionSortSequence(@org.apache.thrift.annotation.Nullable java.util.List<SelectionSort> selectionSortSequence) {
+ public void setSelectionSortSequence(@org.apache.thrift.annotation.Nullable java.util.List<SelectionSort> selectionSortSequence) {
this.selectionSortSequence = selectionSortSequence;
- return this;
}
public void unsetSelectionSortSequence() {
@@ -265,10 +263,9 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
return this.offset;
}
- public Selection setOffset(int offset) {
+ public void setOffset(int offset) {
this.offset = offset;
setOffsetIsSet(true);
- return this;
}
public void unsetOffset() {
@@ -288,10 +285,9 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
return this.size;
}
- public Selection setSize(int size) {
+ public void setSize(int size) {
this.size = size;
setSizeIsSet(true);
- return this;
}
public void unsetSize() {
@@ -607,13 +603,13 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
case 1: // SELECTION_COLUMNS
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
- org.apache.thrift.protocol.TList _list78 = iprot.readListBegin();
- struct.selectionColumns = new java.util.ArrayList<java.lang.String>(_list78.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _elem79;
- for (int _i80 = 0; _i80 < _list78.size; ++_i80)
+ org.apache.thrift.protocol.TList _list86 = iprot.readListBegin();
+ struct.selectionColumns = new java.util.ArrayList<java.lang.String>(_list86.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _elem87;
+ for (int _i88 = 0; _i88 < _list86.size; ++_i88)
{
- _elem79 = iprot.readString();
- struct.selectionColumns.add(_elem79);
+ _elem87 = iprot.readString();
+ struct.selectionColumns.add(_elem87);
}
iprot.readListEnd();
}
@@ -625,14 +621,14 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
case 2: // SELECTION_SORT_SEQUENCE
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
- org.apache.thrift.protocol.TList _list81 = iprot.readListBegin();
- struct.selectionSortSequence = new java.util.ArrayList<SelectionSort>(_list81.size);
- @org.apache.thrift.annotation.Nullable SelectionSort _elem82;
- for (int _i83 = 0; _i83 < _list81.size; ++_i83)
+ org.apache.thrift.protocol.TList _list89 = iprot.readListBegin();
+ struct.selectionSortSequence = new java.util.ArrayList<SelectionSort>(_list89.size);
+ @org.apache.thrift.annotation.Nullable SelectionSort _elem90;
+ for (int _i91 = 0; _i91 < _list89.size; ++_i91)
{
- _elem82 = new SelectionSort();
- _elem82.read(iprot);
- struct.selectionSortSequence.add(_elem82);
+ _elem90 = new SelectionSort();
+ _elem90.read(iprot);
+ struct.selectionSortSequence.add(_elem90);
}
iprot.readListEnd();
}
@@ -663,8 +659,6 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
@@ -677,9 +671,9 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
oprot.writeFieldBegin(SELECTION_COLUMNS_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.selectionColumns.size()));
- for (java.lang.String _iter84 : struct.selectionColumns)
+ for (java.lang.String _iter92 : struct.selectionColumns)
{
- oprot.writeString(_iter84);
+ oprot.writeString(_iter92);
}
oprot.writeListEnd();
}
@@ -691,9 +685,9 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
oprot.writeFieldBegin(SELECTION_SORT_SEQUENCE_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.selectionSortSequence.size()));
- for (SelectionSort _iter85 : struct.selectionSortSequence)
+ for (SelectionSort _iter93 : struct.selectionSortSequence)
{
- _iter85.write(oprot);
+ _iter93.write(oprot);
}
oprot.writeListEnd();
}
@@ -744,18 +738,18 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
if (struct.isSetSelectionColumns()) {
{
oprot.writeI32(struct.selectionColumns.size());
- for (java.lang.String _iter86 : struct.selectionColumns)
+ for (java.lang.String _iter94 : struct.selectionColumns)
{
- oprot.writeString(_iter86);
+ oprot.writeString(_iter94);
}
}
}
if (struct.isSetSelectionSortSequence()) {
{
oprot.writeI32(struct.selectionSortSequence.size());
- for (SelectionSort _iter87 : struct.selectionSortSequence)
+ for (SelectionSort _iter95 : struct.selectionSortSequence)
{
- _iter87.write(oprot);
+ _iter95.write(oprot);
}
}
}
@@ -773,27 +767,27 @@ public class Selection implements org.apache.thrift.TBase<Selection, Selection._
java.util.BitSet incoming = iprot.readBitSet(4);
if (incoming.get(0)) {
{
- org.apache.thrift.protocol.TList _list88 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
- struct.selectionColumns = new java.util.ArrayList<java.lang.String>(_list88.size);
- @org.apache.thrift.annotation.Nullable java.lang.String _elem89;
- for (int _i90 = 0; _i90 < _list88.size; ++_i90)
+ org.apache.thrift.protocol.TList _list96 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+ struct.selectionColumns = new java.util.ArrayList<java.lang.String>(_list96.size);
+ @org.apache.thrift.annotation.Nullable java.lang.String _elem97;
+ for (int _i98 = 0; _i98 < _list96.size; ++_i98)
{
- _elem89 = iprot.readString();
- struct.selectionColumns.add(_elem89);
+ _elem97 = iprot.readString();
+ struct.selectionColumns.add(_elem97);
}
}
struct.setSelectionColumnsIsSet(true);
}
if (incoming.get(1)) {
{
- org.apache.thrift.protocol.TList _list91 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
- struct.selectionSortSequence = new java.util.ArrayList<SelectionSort>(_list91.size);
- @org.apache.thrift.annotation.Nullable SelectionSort _elem92;
- for (int _i93 = 0; _i93 < _list91.size; ++_i93)
+ org.apache.thrift.protocol.TList _list99 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.selectionSortSequence = new java.util.ArrayList<SelectionSort>(_list99.size);
+ @org.apache.thrift.annotation.Nullable SelectionSort _elem100;
+ for (int _i101 = 0; _i101 < _list99.size; ++_i101)
{
- _elem92 = new SelectionSort();
- _elem92.read(iprot);
- struct.selectionSortSequence.add(_elem92);
+ _elem100 = new SelectionSort();
+ _elem100.read(iprot);
+ struct.selectionSortSequence.add(_elem100);
}
}
struct.setSelectionSortSequenceIsSet(true);
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/request/SelectionSort.java b/pinot-common/src/main/java/org/apache/pinot/common/request/SelectionSort.java
index dc79a52..c9c599b 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/request/SelectionSort.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/request/SelectionSort.java
@@ -17,7 +17,7 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.12.0)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
@@ -31,7 +31,7 @@ package org.apache.pinot.common.request;
* The results can be sorted based on one or multiple columns
*
*/
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-07-19")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
public class SelectionSort implements org.apache.thrift.TBase<SelectionSort, SelectionSort._Fields>, java.io.Serializable, Cloneable, Comparable<SelectionSort> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SelectionSort");
@@ -41,8 +41,8 @@ public class SelectionSort implements org.apache.thrift.TBase<SelectionSort, Sel
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new SelectionSortStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new SelectionSortTupleSchemeFactory();
- public @org.apache.thrift.annotation.Nullable java.lang.String column; // optional
- public boolean isAsc; // optional
+ private @org.apache.thrift.annotation.Nullable java.lang.String column; // optional
+ private boolean isAsc; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -152,9 +152,8 @@ public class SelectionSort implements org.apache.thrift.TBase<SelectionSort, Sel
return this.column;
}
- public SelectionSort setColumn(@org.apache.thrift.annotation.Nullable java.lang.String column) {
+ public void setColumn(@org.apache.thrift.annotation.Nullable java.lang.String column) {
this.column = column;
- return this;
}
public void unsetColumn() {
@@ -176,10 +175,9 @@ public class SelectionSort implements org.apache.thrift.TBase<SelectionSort, Sel
return this.isAsc;
}
- public SelectionSort setIsAsc(boolean isAsc) {
+ public void setIsAsc(boolean isAsc) {
this.isAsc = isAsc;
setIsAscIsSet(true);
- return this;
}
public void unsetIsAsc() {
@@ -426,8 +424,6 @@ public class SelectionSort implements org.apache.thrift.TBase<SelectionSort, Sel
iprot.readFieldEnd();
}
iprot.readStructEnd();
-
- // check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/response/ProcessingException.java b/pinot-common/src/main/java/org/apache/pinot/common/response/ProcessingException.java
index 1338363..2ac8c04 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/response/ProcessingException.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/response/ProcessingException.java
@@ -17,65 +17,40 @@
* under the License.
*/
/**
- * Autogenerated by Thrift Compiler (0.9.3)
+ * Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.pinot.common.response;
-import java.util.ArrayList;
-import java.util.BitSet;
-import java.util.Collections;
-import java.util.EnumMap;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import javax.annotation.Generated;
-import org.apache.thrift.EncodingUtils;
-import org.apache.thrift.TException;
-import org.apache.thrift.protocol.TTupleProtocol;
-import org.apache.thrift.scheme.IScheme;
-import org.apache.thrift.scheme.SchemeFactory;
-import org.apache.thrift.scheme.StandardScheme;
-import org.apache.thrift.scheme.TupleScheme;
-
-
-@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
/**
* Processing exception
- *
+ *
*/
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2019-06-06")
-public class ProcessingException extends TException implements org.apache.thrift.TBase<ProcessingException, ProcessingException._Fields>, java.io.Serializable, Cloneable, Comparable<ProcessingException> {
- private static final org.apache.thrift.protocol.TStruct STRUCT_DESC =
- new org.apache.thrift.protocol.TStruct("ProcessingException");
-
- private static final org.apache.thrift.protocol.TField ERROR_CODE_FIELD_DESC =
- new org.apache.thrift.protocol.TField("errorCode", org.apache.thrift.protocol.TType.I32, (short) 1);
- private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC =
- new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short) 2);
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2020-10-31")
+public class ProcessingException extends org.apache.thrift.TException implements org.apache.thrift.TBase<ProcessingException, ProcessingException._Fields>, java.io.Serializable, Cloneable, Comparable<ProcessingException> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ProcessingException");
- private static final Map<Class<? extends IScheme>, SchemeFactory> schemes =
- new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ private static final org.apache.thrift.protocol.TField ERROR_CODE_FIELD_DESC = new org.apache.thrift.protocol.TField("errorCode", org.apache.thrift.protocol.TType.I32, (short)1);
+ private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)2);
- static {
- schemes.put(StandardScheme.class, new ProcessingExceptionStandardSchemeFactory());
- schemes.put(TupleScheme.class, new ProcessingExceptionTupleSchemeFactory());
- }
+ private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new ProcessingExceptionStandardSchemeFactory();
+ private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new ProcessingExceptionTupleSchemeFactory();
private int errorCode; // required
- private String message; // optional
+ private @org.apache.thrift.annotation.Nullable java.lang.String message; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
- ERROR_CODE((short) 1, "errorCode"), MESSAGE((short) 2, "message");
+ ERROR_CODE((short)1, "errorCode"),
+ MESSAGE((short)2, "message");
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+ private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
@@ -83,8 +58,9 @@ public class ProcessingException extends TException implements org.apache.thrift
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
+ @org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
- switch (fieldId) {
+ switch(fieldId) {
case 1: // ERROR_CODE
return ERROR_CODE;
case 2: // MESSAGE
@@ -100,23 +76,22 @@ public class ProcessingException extends TException implements org.apache.thrift
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
- if (fields == null) {
- throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- }
+ if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
- public static _Fields findByName(String name) {
+ @org.apache.thrift.annotation.Nullable
+ public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
- private final String _fieldName;
+ private final java.lang.String _fieldName;
- _Fields(short thriftId, String fieldName) {
+ _Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
@@ -125,7 +100,7 @@ public class ProcessingException extends TException implements org.apache.thrift
return _thriftId;
}
- public String getFieldName() {
+ public java.lang.String getFieldName() {
return _fieldName;
}
}
@@ -134,25 +109,23 @@ public class ProcessingException extends TException implements org.apache.thrift
private static final int __ERRORCODE_ISSET_ID = 0;
private byte __isset_bitfield = 0;
private static final _Fields optionals[] = {_Fields.MESSAGE};
- public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-
+ public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
- Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap =
- new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.ERROR_CODE,
- new org.apache.thrift.meta_data.FieldMetaData("errorCode", org.apache.thrift.TFieldRequirementType.REQUIRED,
- new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
- tmpMap.put(_Fields.MESSAGE,
- new org.apache.thrift.meta_data.FieldMetaData("message", org.apache.thrift.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
+ java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.ERROR_CODE, new org.apache.thrift.meta_data.FieldMetaData("errorCode", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+ tmpMap.put(_Fields.MESSAGE, new org.apache.thrift.meta_data.FieldMetaData("message", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ProcessingException.class, metaDataMap);
}
public ProcessingException() {
}
- public ProcessingException(int errorCode) {
+ public ProcessingException(
+ int errorCode)
+ {
this();
this.errorCode = errorCode;
setErrorCodeIsSet(true);
@@ -190,23 +163,24 @@ public class ProcessingException extends TException implements org.apache.thrift
}
public void unsetErrorCode() {
- __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ERRORCODE_ISSET_ID);
+ __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ERRORCODE_ISSET_ID);
}
/** Returns true if field errorCode is set (has been assigned a value) and false otherwise */
public boolean isSetErrorCode() {
- return EncodingUtils.testBit(__isset_bitfield, __ERRORCODE_ISSET_ID);
+ return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ERRORCODE_ISSET_ID);
}
public void setErrorCodeIsSet(boolean value) {
- __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ERRORCODE_ISSET_ID, value);
+ __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ERRORCODE_ISSET_ID, value);
}
- public String getMessage() {
+ @org.apache.thrift.annotation.Nullable
+ public java.lang.String getMessage() {
return this.message;
}
- public void setMessage(String message) {
+ public void setMessage(@org.apache.thrift.annotation.Nullable java.lang.String message) {
this.message = message;
}
@@ -225,88 +199,86 @@ public class ProcessingException extends TException implements org.apache.thrift
}
}
- public void setFieldValue(_Fields field, Object value) {
+ public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
- case ERROR_CODE:
- if (value == null) {
- unsetErrorCode();
- } else {
- setErrorCode((Integer) value);
- }
- break;
+ case ERROR_CODE:
+ if (value == null) {
+ unsetErrorCode();
+ } else {
+ setErrorCode((java.lang.Integer)value);
+ }
+ break;
+
+ case MESSAGE:
+ if (value == null) {
+ unsetMessage();
+ } else {
+ setMessage((java.lang.String)value);
+ }
+ break;
- case MESSAGE:
- if (value == null) {
- unsetMessage();
- } else {
- setMessage((String) value);
- }
- break;
}
}
- public Object getFieldValue(_Fields field) {
+ @org.apache.thrift.annotation.Nullable
+ public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
- case ERROR_CODE:
- return getErrorCode();
+ case ERROR_CODE:
+ return getErrorCode();
+
+ case MESSAGE:
+ return getMessage();
- case MESSAGE:
- return getMessage();
}
- throw new IllegalStateException();
+ throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
- throw new IllegalArgumentException();
+ throw new java.lang.IllegalArgumentException();
}
switch (field) {
- case ERROR_CODE:
- return isSetErrorCode();
- case MESSAGE:
- return isSetMessage();
+ case ERROR_CODE:
+ return isSetErrorCode();
+ case MESSAGE:
+ return isSetMessage();
}
- throw new IllegalStateException();
+ throw new java.lang.IllegalStateException();
}
@Override
- public boolean equals(Object that) {
- if (that == null) {
+ public boolean equals(java.lang.Object that) {
+ if (that == null)
return false;
- }
- if (that instanceof ProcessingException) {
- return this.equals((ProcessingException) that);
- }
+ if (that instanceof ProcessingException)
+ return this.equals((ProcessingException)that);
return false;
}
public boolean equals(ProcessingException that) {
- if (that == null) {
+ if (that == null)
return false;
- }
+ if (this == that)
+ return true;
boolean this_present_errorCode = true;
boolean that_present_errorCode = true;
if (this_present_errorCode || that_present_errorCode) {
- if (!(this_present_errorCode && that_present_errorCode)) {
+ if (!(this_present_errorCode && that_present_errorCode))
return false;
- }
- if (this.errorCode != that.errorCode) {
+ if (this.errorCode != that.errorCode)
return false;
- }
}
boolean this_present_message = true && this.isSetMessage();
boolean that_present_message = true && that.isSetMessage();
if (this_present_message || that_present_message) {
- if (!(this_present_message && that_present_message)) {
+ if (!(this_present_message && that_present_message))
return false;
- }
- if (!this.message.equals(that.message)) {
+ if (!this.message.equals(that.message))
return false;
- }
}
return true;
@@ -314,21 +286,15 @@ public class ProcessingException extends TException implements org.apache.thrift
@Override
public int hashCode() {
- List<Object> list = new ArrayList<Object>();
+ int hashCode = 1;
- boolean present_errorCode = true;
- list.add(present_errorCode);
- if (present_errorCode) {
- list.add(errorCode);
- }
+ hashCode = hashCode * 8191 + errorCode;
- boolean present_message = true && (isSetMessage());
- list.add(present_message);
- if (present_message) {
- list.add(message);
- }
+ hashCode = hashCode * 8191 + ((isSetMessage()) ? 131071 : 524287);
+ if (isSetMessage())
+ hashCode = hashCode * 8191 + message.hashCode();
- return list.hashCode();
+ return hashCode;
}
@Override
@@ -339,7 +305,7 @@ public class ProcessingException extends TException implements org.apache.thrift
int lastComparison = 0;
- lastComparison = Boolean.valueOf(isSetErrorCode()).compareTo(other.isSetErrorCode());
+ lastComparison = java.lang.Boolean.valueOf(isSetErrorCode()).compareTo(other.isSetErrorCode());
if (lastComparison != 0) {
return lastComparison;
}
@@ -349,7 +315,7 @@ public class ProcessingException extends TException implements org.apache.thrift
return lastComparison;
}
}
- lastComparison = Boolean.valueOf(isSetMessage()).compareTo(other.isSetMessage());
+ lastComparison = java.lang.Boolean.valueOf(isSetMessage()).compareTo(other.isSetMessage());
if (lastComparison != 0) {
return lastComparison;
}
@@ -362,32 +328,29 @@ public class ProcessingException extends TException implements org.apache.thrift
return 0;
}
+ @org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
- public void read(org.apache.thrift.protocol.TProtocol iprot)
- throws org.apache.thrift.TException {
- schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ scheme(iprot).read(iprot, this);
}
- public void write(org.apache.thrift.protocol.TProtocol oprot)
- throws org.apache.thrift.TException {
- schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ scheme(oprot).write(oprot, this);
}
@Override
- public String toString() {
- StringBuilder sb = new StringBuilder("ProcessingException(");
+ public java.lang.String toString() {
+ java.lang.StringBuilder sb = new java.lang.StringBuilder("ProcessingException(");
boolean first = true;
sb.append("errorCode:");
sb.append(this.errorCode);
first = false;
if (isSetMessage()) {
- if (!first) {
- sb.append(", ");
- }
+ if (!first) sb.append(", ");
sb.append("message:");
if (this.message == null) {
sb.append("null");
@@ -400,19 +363,16 @@ public class ProcessingException extends TException implements org.apache.thrift
return sb.toString();
}
- public void validate()
- throws org.apache.thrift.TException {
+ public void validate() throws org.apache.thrift.TException {
// check for required fields
if (!isSetErrorCode()) {
- throw new org.apache.thrift.protocol.TProtocolException(
- "Required field 'errorCode' is unset! Struct:" + toString());
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'errorCode' is unset! Struct:" + toString());
}
// check for sub-struct validity
}
- private void writeObject(java.io.ObjectOutputStream out)
- throws java.io.IOException {
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
@@ -420,8 +380,7 @@ public class ProcessingException extends TException implements org.apache.thrift
}
}
- private void readObject(java.io.ObjectInputStream in)
- throws java.io.IOException, ClassNotFoundException {
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
@@ -431,21 +390,21 @@ public class ProcessingException extends TException implements org.apache.thrift
}
}
- private static class ProcessingExceptionStandardSchemeFactory implements SchemeFactory {
+ private static class ProcessingExceptionStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public ProcessingExceptionStandardScheme getScheme() {
return new ProcessingExceptionStandardScheme();
}
}
- private static class ProcessingExceptionStandardScheme extends StandardScheme<ProcessingException> {
+ private static class ProcessingExceptionStandardScheme extends org.apache.thrift.scheme.StandardScheme<ProcessingException> {
- public void read(org.apache.thrift.protocol.TProtocol iprot, ProcessingException struct)
- throws org.apache.thrift.TException {
+ public void read(org.apache.thrift.protocol.TProtocol iprot, ProcessingException struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
- while (true) {
+ while (true)
+ {
schemeField = iprot.readFieldBegin();
- if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
@@ -453,7 +412,7 @@ public class ProcessingException extends TException implements org.apache.thrift
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.errorCode = iprot.readI32();
struct.setErrorCodeIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -461,7 +420,7 @@ public class ProcessingException extends TException implements org.apache.thrift
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.message = iprot.readString();
struct.setMessageIsSet(true);
- } else {
+ } else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
@@ -474,8 +433,7 @@ public class ProcessingException extends TException implements org.apache.thrift
struct.validate();
}
- public void write(org.apache.thrift.protocol.TProtocol oprot, ProcessingException struct)
- throws org.apache.thrift.TException {
+ public void write(org.apache.thrift.protocol.TProtocol oprot, ProcessingException struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
@@ -492,22 +450,22 @@ public class ProcessingException extends TException implements org.apache.thrift
oprot.writeFieldStop();
oprot.writeStructEnd();
}
+
}
- private static class ProcessingExceptionTupleSchemeFactory implements SchemeFactory {
+ private static class ProcessingExceptionTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public ProcessingExceptionTupleScheme getScheme() {
return new ProcessingExceptionTupleScheme();
}
}
- private static class ProcessingExceptionTupleScheme extends TupleScheme<ProcessingException> {
+ private static class ProcessingExceptionTupleScheme extends org.apache.thrift.scheme.TupleScheme<ProcessingException> {
@Override
- public void write(org.apache.thrift.protocol.TProtocol prot, ProcessingException struct)
- throws org.apache.thrift.TException {
- TTupleProtocol oprot = (TTupleProtocol) prot;
+ public void write(org.apache.thrift.protocol.TProtocol prot, ProcessingException struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
oprot.writeI32(struct.errorCode);
- BitSet optionals = new BitSet();
+ java.util.BitSet optionals = new java.util.BitSet();
if (struct.isSetMessage()) {
optionals.set(0);
}
@@ -518,17 +476,20 @@ public class ProcessingException extends TException implements org.apache.thrift
}
@Override
- public void read(org.apache.thrift.protocol.TProtocol prot, ProcessingException struct)
- throws org.apache.thrift.TException {
- TTupleProtocol iprot = (TTupleProtocol) prot;
+ public void read(org.apache.thrift.protocol.TProtocol prot, ProcessingException struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
struct.errorCode = iprot.readI32();
struct.setErrorCodeIsSet(true);
- BitSet incoming = iprot.readBitSet(1);
+ java.util.BitSet incoming = iprot.readBitSet(1);
if (incoming.get(0)) {
struct.message = iprot.readString();
struct.setMessageIsSet(true);
}
}
}
+
+ private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
+ return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
+ }
}
diff --git a/pinot-common/src/main/java/org/apache/pinot/parsers/utils/ParserUtils.java b/pinot-common/src/main/java/org/apache/pinot/parsers/utils/ParserUtils.java
index 9614d33..c5e6d98 100644
--- a/pinot-common/src/main/java/org/apache/pinot/parsers/utils/ParserUtils.java
+++ b/pinot-common/src/main/java/org/apache/pinot/parsers/utils/ParserUtils.java
@@ -57,6 +57,7 @@ public class ParserUtils {
FILTER_OPERATOR_MAP.put(FilterKind.IS_NULL, FilterOperator.IS_NULL);
FILTER_OPERATOR_MAP.put(FilterKind.IS_NOT_NULL, FilterOperator.IS_NOT_NULL);
FILTER_OPERATOR_MAP.put(FilterKind.TEXT_MATCH, FilterOperator.TEXT_MATCH);
+ FILTER_OPERATOR_MAP.put(FilterKind.JSON_MATCH, FilterOperator.JSON_MATCH);
}
/**
diff --git a/pinot-common/src/main/java/org/apache/pinot/pql/parsers/PinotQuery2BrokerRequestConverter.java b/pinot-common/src/main/java/org/apache/pinot/pql/parsers/PinotQuery2BrokerRequestConverter.java
index d0c7bb5..64fd640 100644
--- a/pinot-common/src/main/java/org/apache/pinot/pql/parsers/PinotQuery2BrokerRequestConverter.java
+++ b/pinot-common/src/main/java/org/apache/pinot/pql/parsers/PinotQuery2BrokerRequestConverter.java
@@ -271,6 +271,7 @@ public class PinotQuery2BrokerRequestConverter {
case IN:
case TEXT_MATCH:
case RANGE:
+ case JSON_MATCH:
//first operand is the always the column
filterQuery.setColumn(ParserUtils.standardizeExpression(operands.get(0), false));
filterQuery.setValue(ParserUtils.getFilterValues(filterKind, operands));
diff --git a/pinot-common/src/main/java/org/apache/pinot/pql/parsers/pql2/ast/FilterKind.java b/pinot-common/src/main/java/org/apache/pinot/pql/parsers/pql2/ast/FilterKind.java
index 4e861a0..7c12367 100644
--- a/pinot-common/src/main/java/org/apache/pinot/pql/parsers/pql2/ast/FilterKind.java
+++ b/pinot-common/src/main/java/org/apache/pinot/pql/parsers/pql2/ast/FilterKind.java
@@ -34,7 +34,8 @@ public enum FilterKind {
REGEXP_LIKE,
IS_NULL,
IS_NOT_NULL,
- TEXT_MATCH;
+ TEXT_MATCH,
+ JSON_MATCH;
/**
* Helper method that returns true if the enum maps to a Range.
diff --git a/pinot-common/src/main/java/org/apache/pinot/sql/parsers/CalciteSqlParser.java b/pinot-common/src/main/java/org/apache/pinot/sql/parsers/CalciteSqlParser.java
index 5275a1a..a69ac33 100644
--- a/pinot-common/src/main/java/org/apache/pinot/sql/parsers/CalciteSqlParser.java
+++ b/pinot-common/src/main/java/org/apache/pinot/sql/parsers/CalciteSqlParser.java
@@ -551,8 +551,10 @@ public class CalciteSqlParser {
Identifier identifierKey = expression.getIdentifier();
if (identifierKey != null && aliasMap.containsKey(identifierKey)) {
Expression aliasExpression = aliasMap.get(identifierKey);
- expression.setType(aliasExpression.getType()).setIdentifier(aliasExpression.getIdentifier())
- .setFunctionCall(aliasExpression.getFunctionCall()).setLiteral(aliasExpression.getLiteral());
+ expression.setType(aliasExpression.getType());
+ expression.setIdentifier(aliasExpression.getIdentifier());
+ expression.setFunctionCall(aliasExpression.getFunctionCall());
+ expression.setLiteral(aliasExpression.getLiteral());
}
Function function = expression.getFunctionCall();
if (function != null) {
@@ -651,7 +653,7 @@ public class CalciteSqlParser {
while (iterator.hasNext()) {
SqlNode next = iterator.next();
Expression columnExpression = toExpression(next);
- if (columnExpression.getType() == ExpressionType.IDENTIFIER && columnExpression.getIdentifier().name
+ if (columnExpression.getType() == ExpressionType.IDENTIFIER && columnExpression.getIdentifier().getName()
.equals("*")) {
throw new SqlCompilationException(
"Syntax error: Pinot currently does not support DISTINCT with *. Please specify each column name after DISTINCT keyword");
diff --git a/pinot-common/src/thrift/request.thrift b/pinot-common/src/thrift/request.thrift
index 36518a7..b853456 100644
--- a/pinot-common/src/thrift/request.thrift
+++ b/pinot-common/src/thrift/request.thrift
@@ -34,7 +34,8 @@ enum FilterOperator {
IN,
IS_NULL,
IS_NOT_NULL,
- TEXT_MATCH
+ TEXT_MATCH,
+ JSON_MATCH
}
/**
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/common/DataSource.java b/pinot-core/src/main/java/org/apache/pinot/core/common/DataSource.java
index 75f0513..e250443 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/common/DataSource.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/common/DataSource.java
@@ -23,6 +23,7 @@ import org.apache.pinot.core.segment.index.readers.BloomFilterReader;
import org.apache.pinot.core.segment.index.readers.Dictionary;
import org.apache.pinot.core.segment.index.readers.ForwardIndexReader;
import org.apache.pinot.core.segment.index.readers.InvertedIndexReader;
+import org.apache.pinot.core.segment.index.readers.JSONIndexReader;
import org.apache.pinot.core.segment.index.readers.NullValueVectorReader;
import org.apache.pinot.core.segment.index.readers.TextIndexReader;
@@ -67,6 +68,12 @@ public interface DataSource {
TextIndexReader getTextIndex();
/**
+ * Returns the JSON index for the column if exists, or {@code null} if not.
+ */
+ @Nullable
+ JSONIndexReader getJSONIndex();
+
+ /**
* Returns the bloom filter for the column if exists, or {@code null} if not.
*/
@Nullable
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/indexsegment/mutable/MutableSegmentImpl.java b/pinot-core/src/main/java/org/apache/pinot/core/indexsegment/mutable/MutableSegmentImpl.java
index 5b038b1..14d6e29 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/indexsegment/mutable/MutableSegmentImpl.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/indexsegment/mutable/MutableSegmentImpl.java
@@ -60,6 +60,7 @@ import org.apache.pinot.core.segment.index.metadata.SegmentMetadata;
import org.apache.pinot.core.segment.index.metadata.SegmentMetadataImpl;
import org.apache.pinot.core.segment.index.readers.BloomFilterReader;
import org.apache.pinot.core.segment.index.readers.InvertedIndexReader;
+import org.apache.pinot.core.segment.index.readers.JSONIndexReader;
import org.apache.pinot.core.segment.index.readers.MutableForwardIndex;
import org.apache.pinot.core.segment.index.readers.ValidDocIndexReader;
import org.apache.pinot.core.segment.index.readers.ValidDocIndexReaderImpl;
@@ -326,10 +327,10 @@ public class MutableSegmentImpl implements MutableSegment {
// Null value vector
MutableNullValueVector nullValueVector = _nullHandlingEnabled ? new MutableNullValueVector() : null;
- // TODO: Support range index and bloom filter for mutable segment
+ // TODO: Support range index and bloom filter and json index for mutable segment
_indexContainerMap.put(column,
new IndexContainer(fieldSpec, partitionFunction, partitions, new NumValuesInfo(), forwardIndex, dictionary,
- invertedIndexReader, null, textIndex, null, nullValueVector));
+ invertedIndexReader, null, textIndex, null, nullValueVector, null));
}
if (_realtimeLuceneReaders != null) {
@@ -1039,6 +1040,7 @@ public class MutableSegmentImpl implements MutableSegment {
final RealtimeLuceneTextIndexReader _textIndex;
final BloomFilterReader _bloomFilter;
final MutableNullValueVector _nullValueVector;
+ final JSONIndexReader _jsonIndex;
volatile Comparable _minValue;
volatile Comparable _maxValue;
@@ -1051,7 +1053,8 @@ public class MutableSegmentImpl implements MutableSegment {
@Nullable Set<Integer> partitions, NumValuesInfo numValuesInfo, MutableForwardIndex forwardIndex,
@Nullable BaseMutableDictionary dictionary, @Nullable RealtimeInvertedIndexReader invertedIndex,
@Nullable InvertedIndexReader rangeIndex, @Nullable RealtimeLuceneTextIndexReader textIndex,
- @Nullable BloomFilterReader bloomFilter, @Nullable MutableNullValueVector nullValueVector) {
+ @Nullable BloomFilterReader bloomFilter, @Nullable MutableNullValueVector nullValueVector,
+ JSONIndexReader jsonIndex) {
_fieldSpec = fieldSpec;
_partitionFunction = partitionFunction;
_partitions = partitions;
@@ -1063,12 +1066,13 @@ public class MutableSegmentImpl implements MutableSegment {
_textIndex = textIndex;
_bloomFilter = bloomFilter;
_nullValueVector = nullValueVector;
+ _jsonIndex = jsonIndex;
}
DataSource toDataSource() {
return new MutableDataSource(_fieldSpec, _numDocsIndexed, _numValuesInfo._numValues,
_numValuesInfo._maxNumValuesPerMVEntry, _partitionFunction, _partitions, _minValue, _maxValue, _forwardIndex,
- _dictionary, _invertedIndex, _rangeIndex, _textIndex, _bloomFilter, _nullValueVector);
+ _dictionary, _invertedIndex, _rangeIndex, _textIndex, _bloomFilter, _nullValueVector, _jsonIndex);
}
@Override
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/io/util/VarLengthBytesValueReaderWriter.java b/pinot-core/src/main/java/org/apache/pinot/core/io/util/VarLengthBytesValueReaderWriter.java
index 5a6a25a..88f56f6 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/io/util/VarLengthBytesValueReaderWriter.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/io/util/VarLengthBytesValueReaderWriter.java
@@ -18,6 +18,9 @@
*/
package org.apache.pinot.core.io.util;
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
import java.util.Arrays;
import org.apache.pinot.common.utils.StringUtil;
import org.apache.pinot.core.segment.memory.PinotDataBuffer;
@@ -63,19 +66,19 @@ public class VarLengthBytesValueReaderWriter implements ValueReader {
/**
* Magic bytes used to identify the dictionary files written in variable length bytes format.
*/
- private static final byte[] MAGIC_BYTES = StringUtil.encodeUtf8(".vl;");
+ public static final byte[] MAGIC_BYTES = StringUtil.encodeUtf8(".vl;");
/**
* Increment this version if there are any structural changes in the store format and
* deal with backward compatibility correctly based on old versions.
*/
- private static final int VERSION = 1;
+ public static final int VERSION = 1;
// Offsets of different fields in the header. Having as constants for readability.
- private static final int VERSION_OFFSET = MAGIC_BYTES.length;
- private static final int NUM_ELEMENTS_OFFSET = VERSION_OFFSET + Integer.BYTES;
- private static final int DATA_SECTION_OFFSET_POSITION = NUM_ELEMENTS_OFFSET + Integer.BYTES;
- private static final int HEADER_LENGTH = DATA_SECTION_OFFSET_POSITION + Integer.BYTES;
+ public static final int VERSION_OFFSET = MAGIC_BYTES.length;
+ public static final int NUM_ELEMENTS_OFFSET = VERSION_OFFSET + Integer.BYTES;
+ public static final int DATA_SECTION_OFFSET_POSITION = NUM_ELEMENTS_OFFSET + Integer.BYTES;
+ public static final int HEADER_LENGTH = DATA_SECTION_OFFSET_POSITION + Integer.BYTES;
private final PinotDataBuffer _dataBuffer;
@@ -144,6 +147,19 @@ public class VarLengthBytesValueReaderWriter implements ValueReader {
return false;
}
+ /**
+  * Serializes the fixed-size header for the variable-length bytes store: magic bytes, format
+  * VERSION, the number of elements, and a final int written as HEADER_LENGTH — presumably the
+  * data-section start offset (it is written at DATA_SECTION_OFFSET_POSITION in the layout, and
+  * the data section immediately follows the header) — TODO confirm against writeHeader().
+  */
+ public static byte[] getHeaderBytes(int numElements)
+ throws IOException {
+
+ ByteArrayOutputStream out = new ByteArrayOutputStream(HEADER_LENGTH);
+ DataOutputStream dos = new DataOutputStream(out);
+ dos.write(MAGIC_BYTES);
+ dos.writeInt(VERSION);
+ dos.writeInt(numElements);
+ dos.writeInt(HEADER_LENGTH);
+ dos.close();
+ return out.toByteArray();
+ }
+
private void writeHeader() {
for (int offset = 0; offset < MAGIC_BYTES.length; offset++) {
_dataBuffer.putByte(offset, MAGIC_BYTES[offset]);
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/operator/filter/BitmapBasedFilterOperator.java b/pinot-core/src/main/java/org/apache/pinot/core/operator/filter/BitmapBasedFilterOperator.java
index d9c25e1..9c307a3 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/operator/filter/BitmapBasedFilterOperator.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/operator/filter/BitmapBasedFilterOperator.java
@@ -38,9 +38,9 @@ public class BitmapBasedFilterOperator extends BaseFilterOperator {
private final boolean _exclusive;
private final int _numDocs;
- BitmapBasedFilterOperator(PredicateEvaluator predicateEvaluator, DataSource dataSource, int numDocs) {
+ public BitmapBasedFilterOperator(PredicateEvaluator predicateEvaluator, InvertedIndexReader invertedIndexReader, int numDocs) {
_predicateEvaluator = predicateEvaluator;
- _invertedIndexReader = dataSource.getInvertedIndex();
+ _invertedIndexReader = invertedIndexReader;
_docIds = null;
_exclusive = predicateEvaluator.isExclusive();
_numDocs = numDocs;
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/operator/filter/FilterOperatorUtils.java b/pinot-core/src/main/java/org/apache/pinot/core/operator/filter/FilterOperatorUtils.java
index 3bc676e..5076ef6 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/operator/filter/FilterOperatorUtils.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/operator/filter/FilterOperatorUtils.java
@@ -65,7 +65,7 @@ public class FilterOperatorUtils {
return new SortedIndexBasedFilterOperator(predicateEvaluator, dataSource, numDocs);
}
if (dataSource.getInvertedIndex() != null) {
- return new BitmapBasedFilterOperator(predicateEvaluator, dataSource, numDocs);
+ return new BitmapBasedFilterOperator(predicateEvaluator, dataSource.getInvertedIndex(), numDocs);
}
return new ScanBasedFilterOperator(predicateEvaluator, dataSource, numDocs);
}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/operator/filter/JSONMatchFilterOperator.java b/pinot-core/src/main/java/org/apache/pinot/core/operator/filter/JSONMatchFilterOperator.java
new file mode 100644
index 0000000..954f5f0
--- /dev/null
+++ b/pinot-core/src/main/java/org/apache/pinot/core/operator/filter/JSONMatchFilterOperator.java
@@ -0,0 +1,148 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.operator.filter;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.pinot.core.operator.blocks.FilterBlock;
+import org.apache.pinot.core.operator.docidsets.BitmapDocIdSet;
+import org.apache.pinot.core.query.request.context.ExpressionContext;
+import org.apache.pinot.core.query.request.context.FilterContext;
+import org.apache.pinot.core.query.request.context.predicate.EqPredicate;
+import org.apache.pinot.core.query.request.context.predicate.InPredicate;
+import org.apache.pinot.core.query.request.context.predicate.NotEqPredicate;
+import org.apache.pinot.core.query.request.context.predicate.NotInPredicate;
+import org.apache.pinot.core.query.request.context.predicate.Predicate;
+import org.apache.pinot.core.segment.creator.impl.inv.JSONIndexCreator;
+import org.apache.pinot.core.segment.index.readers.JSONIndexReader;
+import org.roaringbitmap.buffer.ImmutableRoaringBitmap;
+import org.roaringbitmap.buffer.MutableRoaringBitmap;
+
+
+/**
+ * Filter operator for JSON_MATCH predicates.
+ *
+ * <p>The JSON index stores posting lists for flattened (un-nested) documents, keyed as
+ * {@code <jsonPath><separator><value>} (see {@link JSONIndexCreator#POSTING_LIST_KEY_SEPARATOR}).
+ * This operator rewrites the parsed match filter into that key space, evaluates it against the
+ * index, and maps the matching flattened doc ids back to root document ids.
+ *
+ * <p>NOTE(review): for exclusive predicates (NOT_EQ, NOT_IN) the flattened-to-root mapping means a
+ * root document matches if ANY of its flattened documents matches the negated predicate — confirm
+ * this is the intended JSON_MATCH semantics.
+ */
+public class JSONMatchFilterOperator extends BaseFilterOperator {
+  private static final String OPERATOR_NAME = "JSONMatchFilterOperator";
+
+  private final String _column;
+  private final FilterContext _filterContext;
+  private final JSONIndexReader _jsonIndexReader;
+  private final int _numDocs;
+
+  public JSONMatchFilterOperator(String column, FilterContext filterContext, JSONIndexReader jsonIndexReader,
+      int numDocs) {
+    _column = column;
+    _filterContext = filterContext;
+    _jsonIndexReader = jsonIndexReader;
+    _numDocs = numDocs;
+  }
+
+  @Override
+  protected FilterBlock getNextBlock() {
+    ImmutableRoaringBitmap flattenedDocIds = process(_filterContext);
+    // The index returns ids of flattened documents; translate them back to root document ids
+    // before constructing the doc id set.
+    MutableRoaringBitmap rootDocIds = new MutableRoaringBitmap();
+    Iterator<Integer> iterator = flattenedDocIds.iterator();
+    while (iterator.hasNext()) {
+      rootDocIds.add(_jsonIndexReader.getRootDocId(iterator.next()));
+    }
+    return new FilterBlock(new BitmapDocIdSet(rootDocIds, _numDocs));
+  }
+
+  /**
+   * Recursively evaluates the given filter node against the JSON index and returns the matching
+   * flattened doc ids.
+   */
+  private MutableRoaringBitmap process(FilterContext filterContext) {
+    MutableRoaringBitmap resultBitmap = null;
+
+    switch (filterContext.getType()) {
+      case AND:
+        // BUG FIX: iterate over the children of the node being processed; the original read
+        // _filterContext.getChildren() (the root's children) on every recursive call.
+        for (FilterContext child : filterContext.getChildren()) {
+          if (resultBitmap == null) {
+            resultBitmap = process(child);
+          } else {
+            resultBitmap.and(process(child));
+          }
+        }
+        break;
+      case OR:
+        for (FilterContext child : filterContext.getChildren()) {
+          if (resultBitmap == null) {
+            resultBitmap = process(child);
+          } else {
+            resultBitmap.or(process(child));
+          }
+        }
+        break;
+      case PREDICATE:
+        resultBitmap = _jsonIndexReader.getMatchingDocIds(rewritePredicate(filterContext.getPredicate()));
+        break;
+    }
+
+    return resultBitmap;
+  }
+
+  /**
+   * Rewrites a leaf predicate of the JSON_MATCH filter expression into the posting list key space
+   * of the JSON index: the lhs becomes the JSON column, and each value is prefixed with the JSON
+   * path and the posting list key separator.
+   *
+   * @throws UnsupportedOperationException for predicate types the JSON index cannot serve
+   */
+  private Predicate rewritePredicate(Predicate predicate) {
+    switch (predicate.getType()) {
+      case EQ:
+        EqPredicate eqPredicate = (EqPredicate) predicate;
+        return new EqPredicate(ExpressionContext.forIdentifier(_column),
+            eqPredicate.getLhs().getIdentifier() + JSONIndexCreator.POSTING_LIST_KEY_SEPARATOR + eqPredicate
+                .getValue());
+      case NOT_EQ:
+        NotEqPredicate notEqPredicate = (NotEqPredicate) predicate;
+        return new NotEqPredicate(ExpressionContext.forIdentifier(_column),
+            notEqPredicate.getLhs().getIdentifier() + JSONIndexCreator.POSTING_LIST_KEY_SEPARATOR
+                + notEqPredicate.getValue());
+      case IN:
+        InPredicate inPredicate = (InPredicate) predicate;
+        List<String> inValues = inPredicate.getValues().stream().map(
+            value -> inPredicate.getLhs().getIdentifier() + JSONIndexCreator.POSTING_LIST_KEY_SEPARATOR
+                + value).collect(Collectors.toList());
+        return new InPredicate(ExpressionContext.forIdentifier(_column), inValues);
+      case NOT_IN:
+        NotInPredicate notInPredicate = (NotInPredicate) predicate;
+        List<String> notInValues = notInPredicate.getValues().stream().map(
+            value -> notInPredicate.getLhs().getIdentifier() + JSONIndexCreator.POSTING_LIST_KEY_SEPARATOR
+                + value).collect(Collectors.toList());
+        // BUG FIX: the original constructed an InPredicate here, inverting NOT_IN semantics.
+        return new NotInPredicate(ExpressionContext.forIdentifier(_column), notInValues);
+      case IS_NULL:
+      case IS_NOT_NULL:
+        return predicate;
+      case RANGE:
+      case REGEXP_LIKE:
+      case TEXT_MATCH:
+        throw new UnsupportedOperationException("JSON Match does not support RANGE, REGEXP or TEXTMATCH");
+      default:
+        // BUG FIX: the original fell through on unknown types, leaving the rewritten predicate
+        // null and causing an NPE downstream.
+        throw new UnsupportedOperationException("JSON Match does not support predicate type: " + predicate.getType());
+    }
+  }
+
+  @Override
+  public String getOperatorName() {
+    return OPERATOR_NAME;
+  }
+}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/CastTransformFunction.java b/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/CastTransformFunction.java
index c826446..cb6227f 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/CastTransformFunction.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/operator/transform/function/CastTransformFunction.java
@@ -30,7 +30,7 @@ import org.apache.pinot.core.operator.transform.transformer.datetime.SDFToSDFTra
import org.apache.pinot.core.plan.DocIdSetPlanNode;
-public class CastTransformFunction extends BaseTransformFunction {
+public class CastTransformFunction extends BaseTransformFunction {
public static final String FUNCTION_NAME = "cast";
private TransformFunction _transformFunction;
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/plan/FilterPlanNode.java b/pinot-core/src/main/java/org/apache/pinot/core/plan/FilterPlanNode.java
index 7e076e0..12cfc6f 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/plan/FilterPlanNode.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/plan/FilterPlanNode.java
@@ -23,6 +23,7 @@ import java.util.Arrays;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
+import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.pinot.core.common.DataSource;
import org.apache.pinot.core.indexsegment.IndexSegment;
import org.apache.pinot.core.operator.filter.BaseFilterOperator;
@@ -30,6 +31,7 @@ import org.apache.pinot.core.operator.filter.BitmapBasedFilterOperator;
import org.apache.pinot.core.operator.filter.EmptyFilterOperator;
import org.apache.pinot.core.operator.filter.ExpressionFilterOperator;
import org.apache.pinot.core.operator.filter.FilterOperatorUtils;
+import org.apache.pinot.core.operator.filter.JSONMatchFilterOperator;
import org.apache.pinot.core.operator.filter.MatchAllFilterOperator;
import org.apache.pinot.core.operator.filter.TextMatchFilterOperator;
import org.apache.pinot.core.operator.filter.predicate.PredicateEvaluator;
@@ -37,11 +39,14 @@ import org.apache.pinot.core.operator.filter.predicate.PredicateEvaluatorProvide
import org.apache.pinot.core.query.request.context.ExpressionContext;
import org.apache.pinot.core.query.request.context.FilterContext;
import org.apache.pinot.core.query.request.context.QueryContext;
+import org.apache.pinot.core.query.request.context.predicate.JSONMatchPredicate;
import org.apache.pinot.core.query.request.context.predicate.Predicate;
import org.apache.pinot.core.query.request.context.predicate.TextMatchPredicate;
+import org.apache.pinot.core.query.request.context.utils.QueryContextConverterUtils;
import org.apache.pinot.core.segment.index.readers.NullValueVectorReader;
import org.apache.pinot.core.segment.index.readers.ValidDocIndexReader;
import org.apache.pinot.core.util.QueryOptions;
+import org.apache.pinot.sql.parsers.CalciteSqlParser;
public class FilterPlanNode implements PlanNode {
@@ -129,6 +134,16 @@ public class FilterPlanNode implements PlanNode {
case TEXT_MATCH:
return new TextMatchFilterOperator(dataSource.getTextIndex(), ((TextMatchPredicate) predicate).getValue(),
_numDocs);
+ case JSON_MATCH:
+ FilterContext filterContext = null;
+ try {
+ filterContext = QueryContextConverterUtils
+ .getFilter(CalciteSqlParser.compileToExpression(((JSONMatchPredicate) predicate).getValue()));
+ } catch (SqlParseException e) {
+ throw new RuntimeException("Unable to parse the filter expression for json_match:"+ ((JSONMatchPredicate) predicate).getValue());
+ }
+ return new JSONMatchFilterOperator(lhs.getIdentifier(), filterContext, dataSource.getJSONIndex(),
+ _numDocs);
case IS_NULL:
NullValueVectorReader nullValueVector = dataSource.getNullValueVector();
if (nullValueVector != null) {
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/predicate/Predicate.java b/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/predicate/JSONMatchPredicate.java
similarity index 50%
copy from pinot-core/src/main/java/org/apache/pinot/core/query/request/context/predicate/Predicate.java
copy to pinot-core/src/main/java/org/apache/pinot/core/query/request/context/predicate/JSONMatchPredicate.java
index 203e950..97ad787 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/predicate/Predicate.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/predicate/JSONMatchPredicate.java
@@ -18,30 +18,55 @@
*/
package org.apache.pinot.core.query.request.context.predicate;
+import java.util.Objects;
import org.apache.pinot.core.query.request.context.ExpressionContext;
/**
- * The {@code Predicate} class represents the predicate in the filter.
- * <p>Currently the query engine only accepts string literals as the right-hand side of the predicate, so we store the
- * right-hand side of the predicate as string or list of strings.
+ * Predicate for JSON_MATCH.
*/
-public interface Predicate {
- enum Type {
- EQ, NOT_EQ, IN, NOT_IN, RANGE, REGEXP_LIKE, TEXT_MATCH, IS_NULL, IS_NOT_NULL;
+public class JSONMatchPredicate implements Predicate {
+ private final ExpressionContext _lhs;
+ private final String _value;
- public boolean isExclusive() {
- return this == NOT_EQ || this == NOT_IN || this == IS_NOT_NULL;
+ public JSONMatchPredicate(ExpressionContext lhs, String value) {
+ _lhs = lhs;
+ _value = value;
+ }
+
+ @Override
+ public Type getType() {
+ return Type.JSON_MATCH;
+ }
+
+ @Override
+ public ExpressionContext getLhs() {
+ return _lhs;
+ }
+
+ public String getValue() {
+ return _value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (!(o instanceof JSONMatchPredicate)) {
+ return false;
}
+ JSONMatchPredicate that = (JSONMatchPredicate) o;
+ return Objects.equals(_lhs, that._lhs) && Objects.equals(_value, that._value);
}
- /**
- * Returns the type of the predicate.
- */
- Type getType();
+ @Override
+ public int hashCode() {
+ return Objects.hash(_lhs, _value);
+ }
- /**
- * Returns the left-hand side expression of the predicate.
- */
- ExpressionContext getLhs();
+ @Override
+ public String toString() {
+ return "json_match(" + _lhs + ",'" + _value + "')";
+ }
}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/predicate/Predicate.java b/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/predicate/Predicate.java
index 203e950..a204fc6 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/predicate/Predicate.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/predicate/Predicate.java
@@ -28,7 +28,7 @@ import org.apache.pinot.core.query.request.context.ExpressionContext;
*/
public interface Predicate {
enum Type {
- EQ, NOT_EQ, IN, NOT_IN, RANGE, REGEXP_LIKE, TEXT_MATCH, IS_NULL, IS_NOT_NULL;
+ EQ, NOT_EQ, IN, NOT_IN, RANGE, REGEXP_LIKE, TEXT_MATCH, IS_NULL, IS_NOT_NULL, JSON_MATCH;
public boolean isExclusive() {
return this == NOT_EQ || this == NOT_IN || this == IS_NOT_NULL;
@@ -44,4 +44,5 @@ public interface Predicate {
* Returns the left-hand side expression of the predicate.
*/
ExpressionContext getLhs();
+
}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/utils/QueryContextConverterUtils.java b/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/utils/QueryContextConverterUtils.java
index 581735b..deef50f 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/utils/QueryContextConverterUtils.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/query/request/context/utils/QueryContextConverterUtils.java
@@ -37,6 +37,7 @@ import org.apache.pinot.core.query.request.context.predicate.EqPredicate;
import org.apache.pinot.core.query.request.context.predicate.InPredicate;
import org.apache.pinot.core.query.request.context.predicate.IsNotNullPredicate;
import org.apache.pinot.core.query.request.context.predicate.IsNullPredicate;
+import org.apache.pinot.core.query.request.context.predicate.JSONMatchPredicate;
import org.apache.pinot.core.query.request.context.predicate.NotEqPredicate;
import org.apache.pinot.core.query.request.context.predicate.NotInPredicate;
import org.apache.pinot.core.query.request.context.predicate.RangePredicate;
@@ -290,6 +291,9 @@ public class QueryContextConverterUtils {
case TEXT_MATCH:
return new FilterContext(FilterContext.Type.PREDICATE, null,
new TextMatchPredicate(getExpression(node.getColumn()), node.getValue().get(0)));
+ case JSON_MATCH:
+ return new FilterContext(FilterContext.Type.PREDICATE, null,
+ new JSONMatchPredicate(getExpression(node.getColumn()), node.getValue().get(0)));
case IS_NULL:
return new FilterContext(FilterContext.Type.PREDICATE, null,
new IsNullPredicate(getExpression(node.getColumn())));
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/creator/impl/V1Constants.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/creator/impl/V1Constants.java
index 0498f8c..e745120 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/creator/impl/V1Constants.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/creator/impl/V1Constants.java
@@ -36,6 +36,7 @@ public class V1Constants {
public static final String RAW_SV_FORWARD_INDEX_FILE_EXTENSION = ".sv.raw.fwd";
public static final String UNSORTED_MV_FORWARD_INDEX_FILE_EXTENSION = ".mv.fwd";
public static final String BITMAP_INVERTED_INDEX_FILE_EXTENSION = ".bitmap.inv";
+ public static final String JSON_INDEX_FILE_EXTENSION = ".json.idx";
public static final String BITMAP_RANGE_INDEX_FILE_EXTENSION = ".bitmap.range";
public static final String BLOOM_FILTER_FILE_EXTENSION = ".bloom";
public static final String NULLVALUE_VECTOR_FILE_EXTENSION = ".bitmap.nullvalue";
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/creator/impl/inv/JSONIndexCreator.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/creator/impl/inv/JSONIndexCreator.java
new file mode 100644
index 0000000..4917993
--- /dev/null
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/creator/impl/inv/JSONIndexCreator.java
@@ -0,0 +1,658 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.segment.creator.impl.inv;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.github.wnameless.json.flattener.JsonFlattener;
+import com.google.common.io.Files;
+import com.google.common.primitives.UnsignedBytes;
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.Closeable;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.TreeMap;
+import java.util.stream.Collectors;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.pinot.core.common.BlockDocIdIterator;
+import org.apache.pinot.core.io.util.VarLengthBytesValueReaderWriter;
+import org.apache.pinot.core.operator.blocks.FilterBlock;
+import org.apache.pinot.core.operator.filter.JSONMatchFilterOperator;
+import org.apache.pinot.core.query.request.context.ExpressionContext;
+import org.apache.pinot.core.query.request.context.FilterContext;
+import org.apache.pinot.core.query.request.context.predicate.EqPredicate;
+import org.apache.pinot.core.query.request.context.predicate.Predicate;
+import org.apache.pinot.core.query.request.context.utils.QueryContextConverterUtils;
+import org.apache.pinot.core.segment.index.readers.JSONIndexReader;
+import org.apache.pinot.core.segment.memory.PinotDataBuffer;
+import org.apache.pinot.spi.data.DimensionFieldSpec;
+import org.apache.pinot.spi.data.FieldSpec;
+import org.apache.pinot.sql.parsers.CalciteSqlParser;
+import org.roaringbitmap.RoaringBitmap;
+import org.roaringbitmap.buffer.MutableRoaringBitmap;
+
+import static org.apache.pinot.core.segment.creator.impl.V1Constants.Indexes.JSON_INDEX_FILE_EXTENSION;
+
+
+public class JSONIndexCreator implements Closeable {
+
+ //separator used to join the key and value to create posting list key
+ public static String POSTING_LIST_KEY_SEPARATOR = "|";
+ static int FLUSH_THRESHOLD = 50_000;
+ static int VERSION = 1;
+ private final File flattenedDocId2RootDocIdMappingFile;
+ private final File postingListFile;
+ private File dictionaryheaderFile;
+ private File dictionaryOffsetFile;
+ private File dictionaryFile;
+ private File invertedIndexOffsetFile;
+ private File invertedIndexFile;
+ private File outputIndexFile;
+
+ private int docId = 0;
+ private int numFlatennedDocId = 0;
+ int chunkId = 0;
+
+ private DataOutputStream postingListWriter;
+ private DataOutputStream flattenedDocId2RootDocIdWriter;
+
+ Map<String, List<Integer>> postingListMap = new TreeMap<>();
+ List<Integer> flattenedDocIdList = new ArrayList<>();
+ List<Integer> postingListChunkOffsets = new ArrayList<>();
+ List<Integer> chunkLengths = new ArrayList<>();
+ private FieldSpec fieldSpec;
+
+ public JSONIndexCreator(File indexDir, FieldSpec fieldSpec)
+ throws IOException {
+ this.fieldSpec = fieldSpec;
+ System.out.println("indexDir = " + indexDir);
+
+ String name = fieldSpec.getName();
+ postingListFile = new File(indexDir + name + "_postingList.buf");
+ postingListWriter = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(postingListFile)));
+ postingListChunkOffsets.add(postingListWriter.size());
+
+ dictionaryheaderFile = new File(indexDir, name + "_dictionaryHeader.buf");
+ dictionaryOffsetFile = new File(indexDir, name + "_dictionaryOffset.buf");
+ dictionaryFile = new File(indexDir, name + "_dictionary.buf");
+ invertedIndexOffsetFile = new File(indexDir, name + "_invertedIndexOffset.buf");
+ invertedIndexFile = new File(indexDir, name + "_invertedIndex.buf");
+ flattenedDocId2RootDocIdMappingFile = new File(indexDir, name + "_flattenedDocId.buf");
+ flattenedDocId2RootDocIdWriter =
+ new DataOutputStream(new BufferedOutputStream(new FileOutputStream(flattenedDocId2RootDocIdMappingFile)));
+
+ //output file
+ outputIndexFile = new File(indexDir, name + JSON_INDEX_FILE_EXTENSION);
+ }
+
+ public void add(byte[] data)
+ throws IOException {
+
+ JsonNode jsonNode = new ObjectMapper().readTree(data);
+ List<Map<String, String>> flattenedMapList = unnestJson(jsonNode);
+ for (Map<String, String> map : flattenedMapList) {
+ //
+ for (Map.Entry<String, String> entry : map.entrySet()) {
+ //handle key posting list
+ String key = entry.getKey();
+
+ List<Integer> keyPostingList = postingListMap.get(key);
+ if (keyPostingList == null) {
+ keyPostingList = new ArrayList<>();
+ postingListMap.put(key, keyPostingList);
+ }
+ keyPostingList.add(numFlatennedDocId);
+
+ //handle keyvalue posting list
+ String keyValue = key + POSTING_LIST_KEY_SEPARATOR + entry.getValue();
+ List<Integer> keyValuePostingList = postingListMap.get(keyValue);
+ if (keyValuePostingList == null) {
+ keyValuePostingList = new ArrayList<>();
+ postingListMap.put(keyValue, keyValuePostingList);
+ }
+ keyValuePostingList.add(numFlatennedDocId);
+ }
+ //flattenedDocId2RootDocIdMapping
+ flattenedDocIdList.add(docId);
+
+ numFlatennedDocId++;
+ }
+ docId++;
+
+ //flush data
+ if (docId % FLUSH_THRESHOLD == 0) {
+ flush();
+ }
+ }
+
+ /**
+ * Multi value
+ * @param dataArray
+ * @param length
+ * @throws IOException
+ */
+ public void add(byte[][] dataArray, int length)
+ throws IOException {
+
+ for (int i = 0; i < length; i++) {
+ byte[] data = dataArray[i];
+ JsonNode jsonNode = new ObjectMapper().readTree(data);
+ List<Map<String, String>> flattenedMapList = unnestJson(jsonNode);
+ for (Map<String, String> map : flattenedMapList) {
+ //
+ for (Map.Entry<String, String> entry : map.entrySet()) {
+ //handle key posting list
+ String key = entry.getKey();
+
+ List<Integer> keyPostingList = postingListMap.get(key);
+ if (keyPostingList == null) {
+ keyPostingList = new ArrayList<>();
+ postingListMap.put(key, keyPostingList);
+ }
+ keyPostingList.add(numFlatennedDocId);
+
+ //handle keyvalue posting list
+ String keyValue = key + POSTING_LIST_KEY_SEPARATOR + entry.getValue();
+ List<Integer> keyValuePostingList = postingListMap.get(keyValue);
+ if (keyValuePostingList == null) {
+ keyValuePostingList = new ArrayList<>();
+ postingListMap.put(keyValue, keyValuePostingList);
+ }
+ keyValuePostingList.add(numFlatennedDocId);
+ }
+ //flattenedDocId2RootDocIdMapping
+ flattenedDocIdList.add(numFlatennedDocId);
+
+ numFlatennedDocId++;
+ }
+ }
+ docId++;
+
+ //flush data
+ if (docId % FLUSH_THRESHOLD == 0) {
+ flush();
+ }
+ }
+
+ public void seal()
+ throws IOException {
+
+ flush();
+
+ flattenedDocId2RootDocIdWriter.close();
+ postingListWriter.close();
+
+ //key posting list merging
+ System.out.println("InvertedIndex");
+ System.out.println("=================");
+
+ int maxKeyLength = createInvertedIndex(postingListFile, postingListChunkOffsets, chunkLengths);
+ System.out.println("=================");
+
+ int flattenedDocid = 0;
+ DataInputStream flattenedDocId2RootDocIdReader =
+ new DataInputStream(new BufferedInputStream(new FileInputStream(flattenedDocId2RootDocIdMappingFile)));
+ int[] rootDocIdArray = new int[numFlatennedDocId];
+ while (flattenedDocid < numFlatennedDocId) {
+ rootDocIdArray[flattenedDocid++] = flattenedDocId2RootDocIdReader.readInt();
+ }
+ System.out.println("FlattenedDocId to RootDocId Mapping = ");
+ System.out.println(Arrays.toString(rootDocIdArray));
+
+ //PUT all contents into one file
+
+ //header
+ // version + maxDictionaryLength + [store the offsets + length for each one (dictionary offset file, dictionaryFile, index offset file, index file, flattened docId to rootDocId file)]
+ long headerSize = 2 * Integer.BYTES + 6 * 2 * Long.BYTES;
+
+ long dataSize =
+ dictionaryheaderFile.length() + dictionaryOffsetFile.length() + dictionaryFile.length() + invertedIndexFile
+ .length() + invertedIndexOffsetFile.length() + flattenedDocId2RootDocIdMappingFile.length();
+
+ long totalSize = headerSize + dataSize;
+ PinotDataBuffer pinotDataBuffer =
+ PinotDataBuffer.mapFile(outputIndexFile, false, 0, totalSize, ByteOrder.BIG_ENDIAN, "Nested inverted index");
+
+ pinotDataBuffer.putInt(0, VERSION);
+ pinotDataBuffer.putInt(1 * Integer.BYTES, maxKeyLength);
+ long writtenBytes = headerSize;
+
+ //add dictionary header
+ int bufferId = 0;
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId), writtenBytes);
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId) + Long.BYTES, dictionaryheaderFile.length());
+ pinotDataBuffer.readFrom(writtenBytes, dictionaryheaderFile, 0, dictionaryheaderFile.length());
+ writtenBytes += dictionaryheaderFile.length();
+
+ //add dictionary offset
+ bufferId = bufferId + 1;
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId), writtenBytes);
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId) + Long.BYTES, dictionaryOffsetFile.length());
+ pinotDataBuffer.readFrom(writtenBytes, dictionaryOffsetFile, 0, dictionaryOffsetFile.length());
+ writtenBytes += dictionaryOffsetFile.length();
+
+ //add dictionary
+ bufferId = bufferId + 1;
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId), writtenBytes);
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId) + Long.BYTES, dictionaryFile.length());
+ pinotDataBuffer.readFrom(writtenBytes, dictionaryFile, 0, dictionaryFile.length());
+ writtenBytes += dictionaryFile.length();
+
+ //add index offset
+ bufferId = bufferId + 1;
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId), writtenBytes);
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId) + Long.BYTES, invertedIndexOffsetFile.length());
+ pinotDataBuffer.readFrom(writtenBytes, invertedIndexOffsetFile, 0, invertedIndexOffsetFile.length());
+ writtenBytes += invertedIndexOffsetFile.length();
+
+ //add index data
+ bufferId = bufferId + 1;
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId), writtenBytes);
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId) + Long.BYTES, invertedIndexFile.length());
+ pinotDataBuffer.readFrom(writtenBytes, invertedIndexFile, 0, invertedIndexFile.length());
+ writtenBytes += invertedIndexFile.length();
+
+ //add flattened docid to root doc id mapping
+ bufferId = bufferId + 1;
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId), writtenBytes);
+ pinotDataBuffer.putLong(getBufferStartOffset(bufferId) + Long.BYTES, flattenedDocId2RootDocIdMappingFile.length());
+ pinotDataBuffer
+ .readFrom(writtenBytes, flattenedDocId2RootDocIdMappingFile, 0, flattenedDocId2RootDocIdMappingFile.length());
+ writtenBytes += flattenedDocId2RootDocIdMappingFile.length();
+ }
+
+ private long getBufferStartOffset(int bufferId) {
+ return 2 * Integer.BYTES + 2 * bufferId * Long.BYTES;
+ }
+
+ private int createInvertedIndex(File postingListFile, List<Integer> postingListChunkOffsets,
+ List<Integer> chunkLengthList)
+ throws IOException {
+
+ List<Iterator<ImmutablePair<byte[], int[]>>> chunkIterators = new ArrayList<>();
+
+ for (int i = 0; i < chunkLengthList.size(); i++) {
+
+ final DataInputStream postingListFileReader =
+ new DataInputStream(new BufferedInputStream(new FileInputStream(postingListFile)));
+ postingListFileReader.skipBytes(postingListChunkOffsets.get(i));
+ final int length = chunkLengthList.get(i);
+ chunkIterators.add(new Iterator<ImmutablePair<byte[], int[]>>() {
+ int index = 0;
+
+ @Override
+ public boolean hasNext() {
+ return index < length;
+ }
+
+ @Override
+ public ImmutablePair<byte[], int[]> next() {
+ try {
+ int keyLength = postingListFileReader.readInt();
+ byte[] keyBytes = new byte[keyLength];
+ postingListFileReader.read(keyBytes);
+
+ int postingListLength = postingListFileReader.readInt();
+ int[] postingList = new int[postingListLength];
+ for (int i = 0; i < postingListLength; i++) {
+ postingList[i] = postingListFileReader.readInt();
+ }
+ index++;
+ return ImmutablePair.of(keyBytes, postingList);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+ });
+ }
+ final Comparator<byte[]> byteArrayComparator = UnsignedBytes.lexicographicalComparator();
+
+ PriorityQueue<ImmutablePair<Integer, ImmutablePair<byte[], int[]>>> queue =
+ new PriorityQueue<>(chunkLengthList.size(),
+ (o1, o2) -> byteArrayComparator.compare(o1.getRight().getLeft(), o2.getRight().getLeft()));
+ for (int i = 0; i < chunkIterators.size(); i++) {
+ Iterator<ImmutablePair<byte[], int[]>> iterator = chunkIterators.get(i);
+ if (iterator.hasNext()) {
+ queue.offer(ImmutablePair.of(i, iterator.next()));
+ }
+ }
+ byte[] prevKey = null;
+ RoaringBitmap roaringBitmap = new RoaringBitmap();
+
+ Writer writer = new Writer(dictionaryheaderFile, dictionaryOffsetFile, dictionaryFile, invertedIndexOffsetFile,
+ invertedIndexFile);
+ while (!queue.isEmpty()) {
+ ImmutablePair<Integer, ImmutablePair<byte[], int[]>> poll = queue.poll();
+ byte[] currKey = poll.getRight().getLeft();
+ if (prevKey != null && byteArrayComparator.compare(prevKey, currKey) != 0) {
+ System.out.println(new String(prevKey) + ":" + roaringBitmap);
+ writer.add(prevKey, roaringBitmap);
+ roaringBitmap.clear();
+ }
+
+ roaringBitmap.add(poll.getRight().getRight());
+ prevKey = currKey;
+
+ //add the next key from the chunk where the currKey was removed from
+ Iterator<ImmutablePair<byte[], int[]>> iterator = chunkIterators.get(poll.getLeft());
+ if (iterator.hasNext()) {
+ queue.offer(ImmutablePair.of(poll.getLeft(), iterator.next()));
+ }
+ }
+
+ if (prevKey != null) {
+ writer.add(prevKey, roaringBitmap);
+ }
+ writer.finish();
+ return writer.getMaxDictionaryValueLength();
+ }
+
  /**
   * Flushes the in-memory posting lists and the flattenedDocId -> rootDocId mapping for the
   * current chunk to disk, then resets the in-memory state.
   *
   * Chunk record format: (keyLength, keyBytes, postingListLength, flattenedDocIds...).
   *
   * NOTE(review): createInvertedIndex() merges chunks assuming each chunk's records are
   * sorted by key, so postingListMap is presumably a sorted map (e.g. TreeMap) — confirm its
   * declaration (outside this view).
   * NOTE(review): the chunk offset is appended AFTER writing the chunk, so
   * postingListChunkOffsets is presumably seeded with an initial 0 at construction time for
   * offsets to line up with chunk indices — confirm against the constructor.
   */
  private void flush()
      throws IOException {
    //write the key (length|actual bytes) - posting list(length, flattenedDocIds)
    System.out.println("postingListMap = " + postingListMap);
    for (Map.Entry<String, List<Integer>> entry : postingListMap.entrySet()) {
      byte[] keyBytes = entry.getKey().getBytes(Charset.forName("UTF-8"));
      postingListWriter.writeInt(keyBytes.length);
      postingListWriter.write(keyBytes);
      // NOTE: this local shadows the flattenedDocIdList field (the field is used in the
      // mapping loop below, after this local goes out of scope).
      List<Integer> flattenedDocIdList = entry.getValue();
      postingListWriter.writeInt(flattenedDocIdList.size());
      for (int flattenedDocId : flattenedDocIdList) {
        postingListWriter.writeInt(flattenedDocId);
      }
    }

    //write flattened doc id to root docId mapping
    for (int rootDocId : flattenedDocIdList) {
      flattenedDocId2RootDocIdWriter.writeInt(rootDocId);
    }
    chunkLengths.add(postingListMap.size());
    postingListChunkOffsets.add(postingListWriter.size());
    postingListMap.clear();
    flattenedDocIdList.clear();
  }
+
+ private static List<Map<String, String>> unnestJson(JsonNode root) {
+ Iterator<Map.Entry<String, JsonNode>> fields = root.fields();
+ Map<String, String> flattenedSingleValuesMap = new TreeMap<>();
+ Map<String, JsonNode> arrNodes = new TreeMap<>();
+ Map<String, JsonNode> objectNodes = new TreeMap<>();
+ List<Map<String, String>> resultList = new ArrayList<>();
+ List<Map<String, String>> tempResultList = new ArrayList<>();
+ while (fields.hasNext()) {
+ Map.Entry<String, JsonNode> child = fields.next();
+ if (child.getValue().isValueNode()) {
+ //Normal value node
+ flattenedSingleValuesMap.put(child.getKey(), child.getValue().asText());
+ } else if (child.getValue().isArray()) {
+ //Array Node: Process these nodes later
+ arrNodes.put(child.getKey(), child.getValue());
+ } else {
+ //Object Node
+ objectNodes.put(child.getKey(), child.getValue());
+ }
+ }
+ for (String objectNodeKey : objectNodes.keySet()) {
+ JsonNode objectNode = objectNodes.get(objectNodeKey);
+ modifyKeysInMap(flattenedSingleValuesMap, tempResultList, objectNodeKey, objectNode);
+ }
+ if (tempResultList.isEmpty()) {
+ tempResultList.add(flattenedSingleValuesMap);
+ }
+ if (!arrNodes.isEmpty()) {
+ for (Map<String, String> flattenedMapElement : tempResultList) {
+ for (String arrNodeKey : arrNodes.keySet()) {
+ JsonNode arrNode = arrNodes.get(arrNodeKey);
+ for (JsonNode arrNodeElement : arrNode) {
+ modifyKeysInMap(flattenedMapElement, resultList, arrNodeKey, arrNodeElement);
+ }
+ }
+ }
+ } else {
+ resultList.addAll(tempResultList);
+ }
+ return resultList;
+ }
+
+ private static void modifyKeysInMap(Map<String, String> flattenedMap, List<Map<String, String>> resultList,
+ String arrNodeKey, JsonNode arrNode) {
+ List<Map<String, String>> objectResult = unnestJson(arrNode);
+ for (Map<String, String> flattenedObject : objectResult) {
+ Map<String, String> flattenedObjectCopy = new TreeMap<>(flattenedMap);
+ for (Map.Entry<String, String> entry : flattenedObject.entrySet()) {
+ flattenedObjectCopy.put(arrNodeKey + "." + entry.getKey(), entry.getValue());
+ }
+ resultList.add(flattenedObjectCopy);
+ }
+ }
+
  /**
   * No-op: the intermediate writers are closed in seal().
   * NOTE(review): the temporary files (posting list, dictionary, offsets, mapping) are not
   * deleted here — confirm whether cleanup is expected on close.
   */
  @Override
  public void close()
      throws IOException {

  }
+
+ private class Writer {
+ private DataOutputStream _dictionaryHeaderWriter;
+ private DataOutputStream _dictionaryOffsetWriter;
+ private File _dictionaryOffsetFile;
+ private DataOutputStream _dictionaryWriter;
+ private DataOutputStream _invertedIndexOffsetWriter;
+ private File _invertedIndexOffsetFile;
+ private DataOutputStream _invertedIndexWriter;
+ private int _dictId;
+ private int _dictOffset;
+ private int _invertedIndexOffset;
+ int _maxDictionaryValueLength = Integer.MIN_VALUE;
+
+ public Writer(File dictionaryheaderFile, File dictionaryOffsetFile, File dictionaryFile,
+ File invertedIndexOffsetFile, File invertedIndexFile)
+ throws IOException {
+ _dictionaryHeaderWriter =
+ new DataOutputStream(new BufferedOutputStream(new FileOutputStream(dictionaryheaderFile)));
+
+ _dictionaryOffsetWriter =
+ new DataOutputStream(new BufferedOutputStream(new FileOutputStream(dictionaryOffsetFile)));
+ _dictionaryOffsetFile = dictionaryOffsetFile;
+ _dictionaryWriter = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(dictionaryFile)));
+ _invertedIndexOffsetWriter =
+ new DataOutputStream(new BufferedOutputStream(new FileOutputStream(invertedIndexOffsetFile)));
+ _invertedIndexOffsetFile = invertedIndexOffsetFile;
+ _invertedIndexWriter = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(invertedIndexFile)));
+ _dictId = 0;
+ _dictOffset = 0;
+ _invertedIndexOffset = 0;
+ }
+
+ public void add(byte[] key, RoaringBitmap roaringBitmap)
+ throws IOException {
+ if (key.length > _maxDictionaryValueLength) {
+ _maxDictionaryValueLength = key.length;
+ }
+ //write the key to dictionary
+ _dictionaryOffsetWriter.writeInt(_dictOffset);
+ _dictionaryWriter.write(key);
+
+ //write the roaringBitmap to inverted index
+ _invertedIndexOffsetWriter.writeInt(_invertedIndexOffset);
+
+ int serializedSizeInBytes = roaringBitmap.serializedSizeInBytes();
+ byte[] serializedRoaringBitmap = new byte[serializedSizeInBytes];
+ ByteBuffer serializedRoaringBitmapBuffer = ByteBuffer.wrap(serializedRoaringBitmap);
+ roaringBitmap.serialize(serializedRoaringBitmapBuffer);
+ _invertedIndexWriter.write(serializedRoaringBitmap);
+ System.out.println(
+ "dictId = " + _dictId + ", dict offset:" + _dictOffset + ", valueLength:" + key.length + ", inv offset:"
+ + _invertedIndexOffset + ", serializedSizeInBytes:" + serializedSizeInBytes);
+
+ //increment offsets
+ _dictOffset = _dictOffset + key.length;
+ _invertedIndexOffset = _invertedIndexOffset + serializedSizeInBytes;
+ //increment the dictionary id
+ _dictId = _dictId + 1;
+ }
+
+ void finish()
+ throws IOException {
+ //InvertedIndexReader and VarlengthBytesValueReaderWriter needs one extra entry for offsets since it computes the length for index i using offset[i+1] - offset[i]
+ _invertedIndexOffsetWriter.writeInt(_invertedIndexOffset);
+ _dictionaryOffsetWriter.writeInt(_dictOffset);
+
+ byte[] headerBytes = VarLengthBytesValueReaderWriter.getHeaderBytes(_dictId);
+ _dictionaryHeaderWriter.write(headerBytes);
+ System.out.println("headerBytes = " + Arrays.toString(headerBytes));
+
+ _dictionaryHeaderWriter.close();
+ _dictionaryOffsetWriter.close();
+ _dictionaryWriter.close();
+ _invertedIndexOffsetWriter.close();
+ _invertedIndexWriter.close();
+
+ //data offsets started with zero but the actual dictionary and index will contain (header + offsets + data). so all the offsets must be adjusted ( i.e add size(header) + size(offset) to each offset value)
+ PinotDataBuffer dictionaryOffsetBuffer = PinotDataBuffer
+ .mapFile(dictionaryOffsetFile, false, 0, _dictionaryOffsetFile.length(), ByteOrder.BIG_ENDIAN,
+ "dictionary offset file");
+ int dictOffsetBase = _dictionaryHeaderWriter.size() + _dictionaryOffsetWriter.size();
+ for (int i = 0; i < _dictId + 1; i++) {
+ int offset = dictionaryOffsetBuffer.getInt(i * Integer.BYTES);
+ int newOffset = offset + dictOffsetBase;
+ dictionaryOffsetBuffer.putInt(i * Integer.BYTES, offset + dictOffsetBase);
+ System.out.println("dictId = " + i + ", offset = " + offset + ", newOffset = " + newOffset);
+ }
+
+ PinotDataBuffer invIndexOffsetBuffer = PinotDataBuffer
+ .mapFile(invertedIndexOffsetFile, false, 0, invertedIndexOffsetFile.length(), ByteOrder.BIG_ENDIAN,
+ "invertedIndexOffsetFile");
+ int invIndexOffsetBase = _invertedIndexOffsetWriter.size();
+ for (int i = 0; i < _dictId + 1; i++) {
+ int offset = invIndexOffsetBuffer.getInt(i * Integer.BYTES);
+ int newOffset = offset + invIndexOffsetBase;
+ System.out.println("offset = " + offset + ", newOffset = " + newOffset);
+
+ invIndexOffsetBuffer.putInt(i * Integer.BYTES, newOffset);
+ }
+
+ invIndexOffsetBuffer.close();
+ dictionaryOffsetBuffer.close();
+ }
+
+ public int getMaxDictionaryValueLength() {
+ return _maxDictionaryValueLength;
+ }
+ }
+
+ public static void main(String[] args)
+ throws Exception {
+
+ String json0 = " { \"a\" : { \"b\" : 1, \"c\": 3, \"d\": [{\"x\" : 1}, {\"y\" : 1}] }, \"e\": \"f\", \"g\":2.3 }";
+ String json1 =
+ " { \"name\" : \"adam\", \"age\": 30, \"country\": \"us\", \"address\": {\"number\" : 112, \"street\": \"main st\", \"country\": \"us\" } }";
+ String json2 = " { \"name\" : \"adam\", \"age\": 30 }";
+ String json3 = "{\n" + " \"name\" : \"adam\",\n" + " \"age\" : 30,\n" + " \"country\" : \"us\",\n"
+ + " \"addresses\" : [{\n" + " \"number\" : 1,\n" + " \"street\" : \"main st\",\n"
+ + " \"country\" : \"us\"\n" + " }, {\n" + " \"number\" : 2,\n" + " \"street\" : \"second st\",\n"
+ + " \"country\" : \"us\"\n" + " }, {\n" + " \"number\" : 3,\n" + " \"street\" : \"third st\",\n"
+ + " \"country\" : \"us\"\n" + " }]\n" + "}\n";
+
+ String json4 =
+ "{\n" + " \"year\": [\n" + " 2018\n" + " ],\n" + " \"customers\": [\n" + " {\n"
+ + " \"name\": \"John\",\n" + " \"contact\": [\n" + " {\n"
+ + " \"phone\": \"home\",\n" + " \"number\": \"333-3334\"\n"
+ + " }\n" + " ]\n" + " },\n" + " {\n"
+ + " \"name\": \"Jane\",\n" + " \"contact\": [\n" + " {\n"
+ + " \"phone\": \"home\",\n" + " \"number\": \"555-5556\"\n"
+ + " }\n" + " ],\n" + " \"surname\": \"Shaw\"\n" + " }\n"
+ + " ]\n" + "}";
+
+ String json5 = "{ \n" + " \"accounting\" : [ \n" + " { \"firstName\" : \"John\", \n"
+ + " \"lastName\" : \"Doe\",\n" + " \"age\" : 23 },\n" + "\n"
+ + " { \"firstName\" : \"Mary\", \n" + " \"lastName\" : \"Smith\",\n"
+ + " \"age\" : 32 }\n" + " ], \n"
+ + " \"sales\" : [ \n" + " { \"firstName\" : \"Sally\", \n"
+ + " \"lastName\" : \"Green\",\n" + " \"age\" : 27 },\n"
+ + "\n" + " { \"firstName\" : \"Jim\", \n"
+ + " \"lastName\" : \"Galley\",\n" + " \"age\" : 41 }\n"
+ + " ] \n" + "} ";
+
+ String json = json3;
+ System.out.println("json = " + json);
+ JsonNode rawJsonNode = new ObjectMapper().readTree(json);
+
+ System.out.println(
+ "rawJsonNode = " + new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(rawJsonNode));
+ String flattenJson = JsonFlattener.flatten(json);
+
+ System.out.println("flattenJson = " + flattenJson);
+ JsonNode jsonNode = new ObjectMapper().readTree(flattenJson);
+
+ System.out
+ .println("jsonNode = " + new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(jsonNode));
+ Map<String, Object> stringObjectMap = JsonFlattener.flattenAsMap(json);
+ System.out.println("JsonFlattener.flattenAsMap(json) = " + stringObjectMap);
+ FieldSpec fieldSpec = new DimensionFieldSpec();
+ fieldSpec.setName("person");
+ File tempDir = Files.createTempDir();
+ JSONIndexCreator creator = new JSONIndexCreator(tempDir, fieldSpec);
+ List<Map<String, String>> maps = creator.unnestJson(rawJsonNode);
+ System.out.println("maps = " + maps.toString().replaceAll("},", "}\n"));
+ creator.add(json.getBytes());
+
+ creator.seal();
+ System.out.println("Output Dir = " + tempDir);
+ System.out.println("FileUtils.listFiles(tempDir, null, true) = " + FileUtils.listFiles(tempDir, null, true).stream()
+ .map(file -> file.getName()).collect(Collectors.toList()));
+
+ //Test reader
+ PinotDataBuffer buffer =
+ PinotDataBuffer.mapReadOnlyBigEndianFile(new File(tempDir, fieldSpec.getName() + ".json.idx"));
+ JSONIndexReader reader = new JSONIndexReader(buffer);
+ ExpressionContext lhs = ExpressionContext.forIdentifier("person");
+ Predicate predicate = new EqPredicate(lhs, "addresses.street" + POSTING_LIST_KEY_SEPARATOR + "third st");
+ MutableRoaringBitmap matchingDocIds = reader.getMatchingDocIds(predicate);
+ System.out.println("matchingDocIds = " + matchingDocIds);
+
+ //Test filter operator
+ FilterContext filterContext = QueryContextConverterUtils
+ .getFilter(CalciteSqlParser.compileToExpression("name='adam' AND addresses.street='main st'"));
+ int numDocs = 1;
+ JSONMatchFilterOperator operator = new JSONMatchFilterOperator("person", filterContext, reader, numDocs);
+ FilterBlock filterBlock = operator.nextBlock();
+ BlockDocIdIterator iterator = filterBlock.getBlockDocIdSet().iterator();
+ int docId = -1;
+ while ((docId = iterator.next()) > 0) {
+ System.out.println("docId = " + docId);
+ }
+ }
+}
\ No newline at end of file
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/creator/impl/inv/NestedObjectIndexCreator.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/creator/impl/inv/NestedObjectIndexCreator.java
deleted file mode 100644
index adfa19d..0000000
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/creator/impl/inv/NestedObjectIndexCreator.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.pinot.core.segment.creator.impl.inv;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.github.wnameless.json.flattener.JsonFlattener;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-import org.apache.pinot.spi.data.FieldSpec;
-
-
-public class NestedObjectIndexCreator {
-
- public NestedObjectIndexCreator(File indexDir, FieldSpec fieldSpec, FieldSpec.DataType valueType) {
-
- }
-
- private static List<Map<String, String>> unnestJson(JsonNode root) {
- Iterator<Map.Entry<String, JsonNode>> fields = root.fields();
- Map<String, String> flattenedSingleValuesMap = new TreeMap<>();
- Map<String, JsonNode> arrNodes = new TreeMap<>();
- Map<String, JsonNode> objectNodes = new TreeMap<>();
- List<Map<String, String>> resultList = new ArrayList<>();
- List<Map<String, String>> tempResultList = new ArrayList<>();
- while (fields.hasNext()) {
- Map.Entry<String, JsonNode> child = fields.next();
- if (child.getValue().isValueNode()) {
- //Normal value node
- flattenedSingleValuesMap.put(child.getKey(), child.getValue().asText());
- } else if (child.getValue().isArray()) {
- //Array Node: Process these nodes later
- arrNodes.put(child.getKey(), child.getValue());
- } else {
- //Object Node
- objectNodes.put(child.getKey(), child.getValue());
- }
- }
- for (String objectNodeKey : objectNodes.keySet()) {
- JsonNode objectNode = objectNodes.get(objectNodeKey);
- modifyKeysInMap(flattenedSingleValuesMap, tempResultList, objectNodeKey, objectNode);
- }
- if (tempResultList.isEmpty()) {
- tempResultList.add(flattenedSingleValuesMap);
- }
- if (!arrNodes.isEmpty()) {
- for (Map<String, String> flattenedMapElement : tempResultList) {
- for (String arrNodeKey : arrNodes.keySet()) {
- JsonNode arrNode = arrNodes.get(arrNodeKey);
- for (JsonNode arrNodeElement : arrNode) {
- modifyKeysInMap(flattenedMapElement, resultList, arrNodeKey, arrNodeElement);
- }
- }
- }
- } else {
- resultList.addAll(tempResultList);
- }
- return resultList;
- }
-
- private static void modifyKeysInMap(Map<String, String> flattenedMap, List<Map<String, String>> resultList,
- String arrNodeKey, JsonNode arrNode) {
- List<Map<String, String>> objectResult = unnestJson(arrNode);
- for (Map<String, String> flattenedObject : objectResult) {
- Map<String, String> flattenedObjectCopy = new TreeMap<>(flattenedMap);
- for (Map.Entry<String, String> entry : flattenedObject.entrySet()) {
- flattenedObjectCopy.put(arrNodeKey + "." + entry.getKey(), entry.getValue());
- }
- resultList.add(flattenedObjectCopy);
- }
- }
-
- public void add(byte[] data)
- throws IOException {
-
- JsonNode jsonNode = new ObjectMapper().readTree(data);
- List<Map<String, String>> flattenedMapList = unnestJson(jsonNode);
- for (Map<String, String> map : flattenedMapList) {
-
- }
- }
-
- public static void main(String[] args)
- throws IOException {
-
- String json0 = " { \"a\" : { \"b\" : 1, \"c\": 3, \"d\": [{\"x\" : 1}, {\"y\" : 1}] }, \"e\": \"f\", \"g\":2.3 }";
- String json1 =
- " { \"name\" : \"adam\", \"age\": 30, \"country\": \"us\", \"address\": {\"number\" : 112, \"street\": \"main st\", \"country\": \"us\" } }";
- String json2 = " { \"name\" : \"adam\", \"age\": 30 }";
- String json3 = "{\n" + " \"name\" : \"adam\",\n" + " \"age\" : 30,\n" + " \"country\" : \"us\",\n"
- + " \"addresses\" : [{\n" + " \"number\" : 1,\n" + " \"street\" : \"main st\",\n"
- + " \"country\" : \"us\"\n" + " }, {\n" + " \"number\" : 2,\n" + " \"street\" : \"second st\",\n"
- + " \"country\" : \"us\"\n" + " }, {\n" + " \"number\" : 3,\n" + " \"street\" : \"third st\",\n"
- + " \"country\" : \"us\"\n" + " }]\n" + "}\n";
-
- String json4 =
- "{\n" + " \"year\": [\n" + " 2018\n" + " ],\n" + " \"customers\": [\n" + " {\n"
- + " \"name\": \"John\",\n" + " \"contact\": [\n" + " {\n"
- + " \"phone\": \"home\",\n" + " \"number\": \"333-3334\"\n"
- + " }\n" + " ]\n" + " },\n" + " {\n"
- + " \"name\": \"Jane\",\n" + " \"contact\": [\n" + " {\n"
- + " \"phone\": \"home\",\n" + " \"number\": \"555-5556\"\n"
- + " }\n" + " ],\n" + " \"surname\": \"Shaw\"\n" + " }\n"
- + " ]\n" + "}";
-
- String json5 = "{ \n" + " \"accounting\" : [ \n" + " { \"firstName\" : \"John\", \n"
- + " \"lastName\" : \"Doe\",\n" + " \"age\" : 23 },\n" + "\n"
- + " { \"firstName\" : \"Mary\", \n" + " \"lastName\" : \"Smith\",\n"
- + " \"age\" : 32 }\n" + " ], \n"
- + " \"sales\" : [ \n" + " { \"firstName\" : \"Sally\", \n"
- + " \"lastName\" : \"Green\",\n" + " \"age\" : 27 },\n"
- + "\n" + " { \"firstName\" : \"Jim\", \n"
- + " \"lastName\" : \"Galley\",\n" + " \"age\" : 41 }\n"
- + " ] \n" + "} ";
-
- String json = json3;
- System.out.println("json = " + json);
- JsonNode rawJsonNode = new ObjectMapper().readTree(json);
-
- System.out.println(
- "rawJsonNode = " + new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(rawJsonNode));
- String flattenJson = JsonFlattener.flatten(json);
-
- System.out.println("flattenJson = " + flattenJson);
- JsonNode jsonNode = new ObjectMapper().readTree(flattenJson);
-
- System.out
- .println("jsonNode = " + new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(jsonNode));
- Map<String, Object> stringObjectMap = JsonFlattener.flattenAsMap(json);
- System.out.println("JsonFlattener.flattenAsMap(json) = " + stringObjectMap);
- NestedObjectIndexCreator creator = new NestedObjectIndexCreator(null, null, null);
- List<Map<String, String>> maps = creator.unnestJson(rawJsonNode);
- System.out.println("maps = " + maps.toString().replaceAll("},", "}\n"));
-// creator.add(json.getBytes());
- }
-}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/column/ColumnIndexContainer.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/column/ColumnIndexContainer.java
index 087f4b2..909a669 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/column/ColumnIndexContainer.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/column/ColumnIndexContainer.java
@@ -23,6 +23,7 @@ import org.apache.pinot.core.segment.index.readers.BloomFilterReader;
import org.apache.pinot.core.segment.index.readers.Dictionary;
import org.apache.pinot.core.segment.index.readers.ForwardIndexReader;
import org.apache.pinot.core.segment.index.readers.InvertedIndexReader;
+import org.apache.pinot.core.segment.index.readers.JSONIndexReader;
import org.apache.pinot.core.segment.index.readers.NullValueVectorReaderImpl;
import org.apache.pinot.core.segment.index.readers.TextIndexReader;
@@ -68,4 +69,10 @@ public interface ColumnIndexContainer extends Closeable {
* @return Get the null value vector for the column, or {@code null} if it does not exist.
*/
NullValueVectorReaderImpl getNullValueVector();
+
+ /**
+ *
+ * @return Get the JSON index for the column, or {@code null} if it does not exist.
+ */
+ JSONIndexReader getJSONIndex();
}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/column/PhysicalColumnIndexContainer.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/column/PhysicalColumnIndexContainer.java
index 595a8e0..398623b 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/column/PhysicalColumnIndexContainer.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/column/PhysicalColumnIndexContainer.java
@@ -33,6 +33,7 @@ import org.apache.pinot.core.segment.index.readers.FloatDictionary;
import org.apache.pinot.core.segment.index.readers.ForwardIndexReader;
import org.apache.pinot.core.segment.index.readers.IntDictionary;
import org.apache.pinot.core.segment.index.readers.InvertedIndexReader;
+import org.apache.pinot.core.segment.index.readers.JSONIndexReader;
import org.apache.pinot.core.segment.index.readers.LongDictionary;
import org.apache.pinot.core.segment.index.readers.NullValueVectorReaderImpl;
import org.apache.pinot.core.segment.index.readers.OnHeapDoubleDictionary;
@@ -70,6 +71,7 @@ public final class PhysicalColumnIndexContainer implements ColumnIndexContainer
private final BaseImmutableDictionary _dictionary;
private final BloomFilterReader _bloomFilter;
private final NullValueVectorReaderImpl _nullValueVectorReader;
+ private final JSONIndexReader _jsonIndex;
public PhysicalColumnIndexContainer(SegmentDirectory.Reader segmentReader, ColumnMetadata metadata,
IndexLoadingConfig indexLoadingConfig, File segmentIndexDir)
@@ -79,6 +81,7 @@ public final class PhysicalColumnIndexContainer implements ColumnIndexContainer
boolean loadRangeIndex = indexLoadingConfig.getRangeIndexColumns().contains(columnName);
boolean loadTextIndex = indexLoadingConfig.getTextIndexColumns().contains(columnName);
boolean loadOnHeapDictionary = indexLoadingConfig.getOnHeapDictionaryColumns().contains(columnName);
+ boolean loadJSONIndex = indexLoadingConfig.getJsonIndexColumns().contains(columnName);
BloomFilterConfig bloomFilterConfig = indexLoadingConfig.getBloomFilterConfigs().get(columnName);
if (segmentReader.hasIndexFor(columnName, ColumnIndexType.NULLVALUE_VECTOR)) {
@@ -96,6 +99,13 @@ public final class PhysicalColumnIndexContainer implements ColumnIndexContainer
} else {
_textIndex = null;
}
+ if (loadJSONIndex) {
+ Preconditions.checkState(segmentReader.hasIndexFor(columnName, ColumnIndexType.JSON_INDEX));
+ PinotDataBuffer jsonIndexBuffer = segmentReader.getIndexFor(columnName, ColumnIndexType.JSON_INDEX);
+ _jsonIndex = new JSONIndexReader(jsonIndexBuffer);
+ } else {
+ _jsonIndex = null;
+ }
PinotDataBuffer fwdIndexBuffer = segmentReader.getIndexFor(columnName, ColumnIndexType.FORWARD_INDEX);
@@ -187,6 +197,11 @@ public final class PhysicalColumnIndexContainer implements ColumnIndexContainer
return _nullValueVectorReader;
}
+ @Override
+ public JSONIndexReader getJSONIndex() {
+ return _jsonIndex;
+ }
+
//TODO: move this to a DictionaryLoader class
public static BaseImmutableDictionary loadDictionary(PinotDataBuffer dictionaryBuffer, ColumnMetadata metadata,
boolean loadOnHeap) {
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/BaseDataSource.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/BaseDataSource.java
index 172e8e6..278b737 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/BaseDataSource.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/BaseDataSource.java
@@ -25,6 +25,7 @@ import org.apache.pinot.core.segment.index.readers.BloomFilterReader;
import org.apache.pinot.core.segment.index.readers.Dictionary;
import org.apache.pinot.core.segment.index.readers.ForwardIndexReader;
import org.apache.pinot.core.segment.index.readers.InvertedIndexReader;
+import org.apache.pinot.core.segment.index.readers.JSONIndexReader;
import org.apache.pinot.core.segment.index.readers.NullValueVectorReader;
import org.apache.pinot.core.segment.index.readers.TextIndexReader;
@@ -38,11 +39,14 @@ public abstract class BaseDataSource implements DataSource {
private final TextIndexReader _textIndex;
private final BloomFilterReader _bloomFilter;
private final NullValueVectorReader _nullValueVector;
+ @Nullable
+ private JSONIndexReader _jsonIndexReader;
public BaseDataSource(DataSourceMetadata dataSourceMetadata, ForwardIndexReader<?> forwardIndex,
@Nullable Dictionary dictionary, @Nullable InvertedIndexReader<?> invertedIndex,
@Nullable InvertedIndexReader<?> rangeIndex, @Nullable TextIndexReader textIndex,
- @Nullable BloomFilterReader bloomFilter, @Nullable NullValueVectorReader nullValueVector) {
+ @Nullable BloomFilterReader bloomFilter, @Nullable NullValueVectorReader nullValueVector,
+ @Nullable JSONIndexReader jsonIndexReader) {
_dataSourceMetadata = dataSourceMetadata;
_forwardIndex = forwardIndex;
_dictionary = dictionary;
@@ -51,6 +55,7 @@ public abstract class BaseDataSource implements DataSource {
_textIndex = textIndex;
_bloomFilter = bloomFilter;
_nullValueVector = nullValueVector;
+ _jsonIndexReader = jsonIndexReader;
}
@Override
@@ -98,4 +103,10 @@ public abstract class BaseDataSource implements DataSource {
public NullValueVectorReader getNullValueVector() {
return _nullValueVector;
}
+
+ @Nullable
+ @Override
+ public JSONIndexReader getJSONIndex() {
+ return _jsonIndexReader;
+ }
}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/ImmutableDataSource.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/ImmutableDataSource.java
index cc09adf..9af73a7 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/ImmutableDataSource.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/ImmutableDataSource.java
@@ -23,7 +23,9 @@ import javax.annotation.Nullable;
import org.apache.pinot.core.common.DataSourceMetadata;
import org.apache.pinot.core.data.partition.PartitionFunction;
import org.apache.pinot.core.segment.index.column.ColumnIndexContainer;
+import org.apache.pinot.core.segment.index.datasource.BaseDataSource;
import org.apache.pinot.core.segment.index.metadata.ColumnMetadata;
+import org.apache.pinot.core.segment.index.readers.JSONIndexReader;
import org.apache.pinot.spi.data.FieldSpec;
@@ -36,9 +38,10 @@ public class ImmutableDataSource extends BaseDataSource {
super(new ImmutableDataSourceMetadata(columnMetadata), columnIndexContainer.getForwardIndex(),
columnIndexContainer.getDictionary(), columnIndexContainer.getInvertedIndex(),
columnIndexContainer.getRangeIndex(), columnIndexContainer.getTextIndex(),
- columnIndexContainer.getBloomFilter(), columnIndexContainer.getNullValueVector());
+ columnIndexContainer.getBloomFilter(), columnIndexContainer.getNullValueVector(), columnIndexContainer.getJSONIndex());
}
+
private static class ImmutableDataSourceMetadata implements DataSourceMetadata {
final FieldSpec _fieldSpec;
final boolean _sorted;
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/MutableDataSource.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/MutableDataSource.java
index a927353..c38b1bb 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/MutableDataSource.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/datasource/MutableDataSource.java
@@ -26,6 +26,7 @@ import org.apache.pinot.core.segment.index.readers.BloomFilterReader;
import org.apache.pinot.core.segment.index.readers.Dictionary;
import org.apache.pinot.core.segment.index.readers.ForwardIndexReader;
import org.apache.pinot.core.segment.index.readers.InvertedIndexReader;
+import org.apache.pinot.core.segment.index.readers.JSONIndexReader;
import org.apache.pinot.core.segment.index.readers.NullValueVectorReader;
import org.apache.pinot.core.segment.index.readers.TextIndexReader;
import org.apache.pinot.spi.data.FieldSpec;
@@ -42,10 +43,10 @@ public class MutableDataSource extends BaseDataSource {
@Nullable Comparable maxValue, ForwardIndexReader forwardIndex, @Nullable Dictionary dictionary,
@Nullable InvertedIndexReader invertedIndex, @Nullable InvertedIndexReader rangeIndex,
@Nullable TextIndexReader textIndex, @Nullable BloomFilterReader bloomFilter,
- @Nullable NullValueVectorReader nullValueVector) {
+ @Nullable NullValueVectorReader nullValueVector, JSONIndexReader jsonIndex) {
super(new MutableDataSourceMetadata(fieldSpec, numDocs, numValues, maxNumValuesPerMVEntry, partitionFunction,
partitions, minValue, maxValue), forwardIndex, dictionary, invertedIndex, rangeIndex, textIndex, bloomFilter,
- nullValueVector);
+ nullValueVector, jsonIndex);
}
private static class MutableDataSourceMetadata implements DataSourceMetadata {
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/loader/IndexLoadingConfig.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/loader/IndexLoadingConfig.java
index a6817a0..24a799c 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/loader/IndexLoadingConfig.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/loader/IndexLoadingConfig.java
@@ -47,6 +47,7 @@ public class IndexLoadingConfig {
private ReadMode _readMode = ReadMode.DEFAULT_MODE;
private List<String> _sortedColumns = Collections.emptyList();
private Set<String> _invertedIndexColumns = new HashSet<>();
+ private Set<String> _jsonIndexColumns = new HashSet<>();
private Set<String> _textIndexColumns = new HashSet<>();
private Set<String> _rangeIndexColumns = new HashSet<>();
private Set<String> _noDictionaryColumns = new HashSet<>(); // TODO: replace this by _noDictionaryConfig.
@@ -92,6 +93,11 @@ public class IndexLoadingConfig {
_invertedIndexColumns.addAll(invertedIndexColumns);
}
+ List<String> jsonIndexColumns = indexingConfig.getJsonIndexColumns();
+ if (jsonIndexColumns != null) {
+ _jsonIndexColumns.addAll(jsonIndexColumns);
+ }
+
List<String> rangeIndexColumns = indexingConfig.getRangeIndexColumns();
if (rangeIndexColumns != null) {
_rangeIndexColumns.addAll(rangeIndexColumns);
@@ -222,6 +228,10 @@ public class IndexLoadingConfig {
return _invertedIndexColumns;
}
+ public Set<String> getJsonIndexColumns() {
+ return _jsonIndexColumns;
+ }
+
public Set<String> getRangeIndexColumns() {
return _rangeIndexColumns;
}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/loader/SegmentPreProcessor.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/loader/SegmentPreProcessor.java
index ca7f7e9..06a7adb 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/loader/SegmentPreProcessor.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/loader/SegmentPreProcessor.java
@@ -31,6 +31,7 @@ import org.apache.pinot.core.segment.index.loader.columnminmaxvalue.ColumnMinMax
import org.apache.pinot.core.segment.index.loader.defaultcolumn.DefaultColumnHandler;
import org.apache.pinot.core.segment.index.loader.defaultcolumn.DefaultColumnHandlerFactory;
import org.apache.pinot.core.segment.index.loader.invertedindex.InvertedIndexHandler;
+import org.apache.pinot.core.segment.index.loader.invertedindex.JSONIndexHandler;
import org.apache.pinot.core.segment.index.loader.invertedindex.RangeIndexHandler;
import org.apache.pinot.core.segment.index.loader.invertedindex.TextIndexHandler;
import org.apache.pinot.core.segment.index.metadata.SegmentMetadataImpl;
@@ -113,6 +114,11 @@ public class SegmentPreProcessor implements AutoCloseable {
new RangeIndexHandler(_indexDir, _segmentMetadata, _indexLoadingConfig, segmentWriter);
rangeIndexHandler.createRangeIndices();
+ // Create json indices according to the index config.
+ JSONIndexHandler jsonIndexHandler =
+ new JSONIndexHandler(_indexDir, _segmentMetadata, _indexLoadingConfig, segmentWriter);
+ jsonIndexHandler.createJsonIndices();
+
Set<String> textIndexColumns = _indexLoadingConfig.getTextIndexColumns();
if (textIndexColumns.size() > 0) {
TextIndexHandler textIndexHandler =
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/loader/invertedindex/JSONIndexHandler.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/loader/invertedindex/JSONIndexHandler.java
new file mode 100644
index 0000000..64282de
--- /dev/null
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/loader/invertedindex/JSONIndexHandler.java
@@ -0,0 +1,216 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.segment.index.loader.invertedindex;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
+import org.apache.commons.io.FileUtils;
+import org.apache.pinot.core.indexsegment.generator.SegmentVersion;
+import org.apache.pinot.core.segment.creator.impl.inv.JSONIndexCreator;
+import org.apache.pinot.core.segment.index.column.PhysicalColumnIndexContainer;
+import org.apache.pinot.core.segment.index.loader.IndexLoadingConfig;
+import org.apache.pinot.core.segment.index.loader.LoaderUtils;
+import org.apache.pinot.core.segment.index.metadata.ColumnMetadata;
+import org.apache.pinot.core.segment.index.metadata.SegmentMetadataImpl;
+import org.apache.pinot.core.segment.index.readers.BaseImmutableDictionary;
+import org.apache.pinot.core.segment.index.readers.Dictionary;
+import org.apache.pinot.core.segment.index.readers.ForwardIndexReader;
+import org.apache.pinot.core.segment.index.readers.ForwardIndexReaderContext;
+import org.apache.pinot.core.segment.index.readers.forward.FixedBitMVForwardIndexReader;
+import org.apache.pinot.core.segment.index.readers.forward.FixedBitSVForwardIndexReader;
+import org.apache.pinot.core.segment.memory.PinotDataBuffer;
+import org.apache.pinot.core.segment.store.ColumnIndexType;
+import org.apache.pinot.core.segment.store.SegmentDirectory;
+import org.apache.pinot.spi.data.FieldSpec;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.pinot.core.segment.creator.impl.V1Constants.Indexes.JSON_INDEX_FILE_EXTENSION;
+
+
+@SuppressWarnings({"rawtypes", "unchecked"})
+public class JSONIndexHandler {
+ private static final Logger LOGGER = LoggerFactory.getLogger(JSONIndexHandler.class);
+
+ private final File _indexDir;
+ private final SegmentDirectory.Writer _segmentWriter;
+ private final String _segmentName;
+ private final SegmentVersion _segmentVersion;
+ private final Set<ColumnMetadata> _jsonIndexColumns = new HashSet<>();
+
+ public JSONIndexHandler(File indexDir, SegmentMetadataImpl segmentMetadata, IndexLoadingConfig indexLoadingConfig,
+ SegmentDirectory.Writer segmentWriter) {
+ _indexDir = indexDir;
+ _segmentWriter = segmentWriter;
+ _segmentName = segmentMetadata.getName();
+ _segmentVersion = SegmentVersion.valueOf(segmentMetadata.getVersion());
+
+ // Only create json index on dictionary-encoded unsorted columns
+ for (String column : indexLoadingConfig.getJsonIndexColumns()) {
+ ColumnMetadata columnMetadata = segmentMetadata.getColumnMetadataFor(column);
+ if (columnMetadata != null && !columnMetadata.isSorted()) {
+ _jsonIndexColumns.add(columnMetadata);
+ }
+ }
+ }
+
+ public void createJsonIndices()
+ throws IOException {
+ for (ColumnMetadata columnMetadata : _jsonIndexColumns) {
+ createJSONIndexForColumn(columnMetadata);
+ }
+ }
+
+ private void createJSONIndexForColumn(ColumnMetadata columnMetadata)
+ throws IOException {
+ String column = columnMetadata.getColumnName();
+
+ File inProgress = new File(_indexDir, column + JSON_INDEX_FILE_EXTENSION + ".inprogress");
+ File jsonIndexFile = new File(_indexDir, column + JSON_INDEX_FILE_EXTENSION);
+
+ if (!inProgress.exists()) {
+ // Marker file does not exist, which means last run ended normally.
+
+ if (_segmentWriter.hasIndexFor(column, ColumnIndexType.JSON_INDEX)) {
+ // Skip creating json index if already exists.
+
+ LOGGER.info("Found json index for segment: {}, column: {}", _segmentName, column);
+ return;
+ }
+
+ // Create a marker file.
+ FileUtils.touch(inProgress);
+ } else {
+ // Marker file exists, which means last run gets interrupted.
+
+ // Remove json index if exists.
+ // For v1 and v2, it's the actual json index. For v3, it's the temporary json index.
+ FileUtils.deleteQuietly(jsonIndexFile);
+ }
+
+ // Create new json index for the column.
+ LOGGER.info("Creating new json index for segment: {}, column: {}", _segmentName, column);
+ if (columnMetadata.hasDictionary()) {
+ handleDictionaryBasedColumn(columnMetadata);
+ } else {
+ handleNonDictionaryBasedColumn(columnMetadata);
+ }
+
+ // For v3, write the generated json index file into the single file and remove it.
+ if (_segmentVersion == SegmentVersion.v3) {
+ LoaderUtils.writeIndexToV3Format(_segmentWriter, column, jsonIndexFile, ColumnIndexType.JSON_INDEX);
+ }
+
+ // Delete the marker file.
+ FileUtils.deleteQuietly(inProgress);
+
+ LOGGER.info("Created json index for segment: {}, column: {}", _segmentName, column);
+ }
+
+ private void handleDictionaryBasedColumn(ColumnMetadata columnMetadata)
+ throws IOException {
+ int numDocs = columnMetadata.getTotalDocs();
+ try (ForwardIndexReader forwardIndexReader = getForwardIndexReader(columnMetadata, _segmentWriter);
+ ForwardIndexReaderContext readerContext = forwardIndexReader.createContext();
+ Dictionary dictionary = getDictionaryReader(columnMetadata, _segmentWriter);
+ JSONIndexCreator jsonIndexCreator = new JSONIndexCreator(_indexDir, columnMetadata.getFieldSpec())) {
+ if (columnMetadata.isSingleValue()) {
+ switch (columnMetadata.getDataType()) {
+ case STRING:
+ for (int i = 0; i < numDocs; i++) {
+ int dictId = forwardIndexReader.getDictId(i, readerContext);
+ jsonIndexCreator.add(dictionary.getStringValue(dictId).getBytes("UTF-8"));
+ }
+ break;
+ case BYTES:
+ // Single-value column
+ for (int i = 0; i < numDocs; i++) {
+ int dictId = forwardIndexReader.getDictId(i, readerContext);
+ jsonIndexCreator.add(dictionary.getBytesValue(dictId));
+ }
+ break;
+ default:
+ throw new IllegalStateException("Unsupported data type: " + columnMetadata.getDataType());
+ }
+ } else {
+ // Multi-value column
+ throw new IllegalStateException("JSON Indexing is not supported on multi-valued columns ");
+ }
+ jsonIndexCreator.seal();
+ }
+ }
+
+ private void handleNonDictionaryBasedColumn(ColumnMetadata columnMetadata)
+ throws IOException {
+ FieldSpec.DataType dataType = columnMetadata.getDataType();
+ if (dataType != FieldSpec.DataType.BYTES || dataType != FieldSpec.DataType.STRING) {
+ throw new UnsupportedOperationException(
+ "JSON indexing is only supported for STRING/BYTES datatype but found: " + dataType);
+ }
+ int numDocs = columnMetadata.getTotalDocs();
+ try (ForwardIndexReader forwardIndexReader = getForwardIndexReader(columnMetadata, _segmentWriter);
+ ForwardIndexReaderContext readerContext = forwardIndexReader.createContext();
+ JSONIndexCreator jsonIndexCreator = new JSONIndexCreator(_indexDir, columnMetadata.getFieldSpec())) {
+ if (columnMetadata.isSingleValue()) {
+ // Single-value column.
+ switch (dataType) {
+ case STRING:
+ case BYTES:
+ for (int i = 0; i < numDocs; i++) {
+ jsonIndexCreator.add(forwardIndexReader.getBytes(i, readerContext));
+ }
+ break;
+ default:
+ throw new IllegalStateException("JSON Indexing Unsupported for data type: " + dataType);
+ }
+ } else {
+ // Multi-value column
+ switch (dataType) {
+ default:
+ throw new IllegalStateException("JSON Indexing is not supported on multi-valued columns ");
+ }
+ }
+ jsonIndexCreator.seal();
+ }
+ }
+
+ private ForwardIndexReader<?> getForwardIndexReader(ColumnMetadata columnMetadata,
+ SegmentDirectory.Writer segmentWriter)
+ throws IOException {
+ PinotDataBuffer buffer = segmentWriter.getIndexFor(columnMetadata.getColumnName(), ColumnIndexType.FORWARD_INDEX);
+ int numRows = columnMetadata.getTotalDocs();
+ int numBitsPerValue = columnMetadata.getBitsPerElement();
+ if (columnMetadata.isSingleValue()) {
+ return new FixedBitSVForwardIndexReader(buffer, numRows, numBitsPerValue);
+ } else {
+ return new FixedBitMVForwardIndexReader(buffer, numRows, columnMetadata.getTotalNumberOfEntries(),
+ numBitsPerValue);
+ }
+ }
+
+ private BaseImmutableDictionary getDictionaryReader(ColumnMetadata columnMetadata,
+ SegmentDirectory.Writer segmentWriter)
+ throws IOException {
+ PinotDataBuffer buffer = segmentWriter.getIndexFor(columnMetadata.getColumnName(), ColumnIndexType.DICTIONARY);
+ BaseImmutableDictionary dictionary = PhysicalColumnIndexContainer.loadDictionary(buffer, columnMetadata, false);
+ return dictionary;
+ }
+}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/index/readers/JSONIndexReader.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/readers/JSONIndexReader.java
new file mode 100644
index 0000000..37c0948
--- /dev/null
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/index/readers/JSONIndexReader.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pinot.core.segment.index.readers;
+
+import com.google.common.base.Preconditions;
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Arrays;
+import org.apache.calcite.sql.parser.SqlParseException;
+import org.apache.pinot.common.utils.StringUtil;
+import org.apache.pinot.core.operator.blocks.FilterBlock;
+import org.apache.pinot.core.operator.docidsets.BitmapDocIdSet;
+import org.apache.pinot.core.operator.filter.BitmapBasedFilterOperator;
+import org.apache.pinot.core.operator.filter.predicate.PredicateEvaluator;
+import org.apache.pinot.core.operator.filter.predicate.PredicateEvaluatorProvider;
+import org.apache.pinot.core.query.request.context.FilterContext;
+import org.apache.pinot.core.query.request.context.predicate.Predicate;
+import org.apache.pinot.core.query.request.context.utils.QueryContextConverterUtils;
+import org.apache.pinot.core.segment.memory.PinotDataBuffer;
+import org.apache.pinot.spi.data.FieldSpec;
+import org.apache.pinot.sql.parsers.CalciteSqlParser;
+import org.roaringbitmap.buffer.ImmutableRoaringBitmap;
+import org.roaringbitmap.buffer.MutableRoaringBitmap;
+
+
+public class JSONIndexReader implements Closeable {
+
+ private static int EXPECTED_VERSION = 1;
+ private static int DICT_HEADER_INDEX = 0;
+ private static int DICT_OFFSET_INDEX = 1;
+ private static int DICT_DATA_INDEX = 2;
+ private static int INV_OFFSET_INDEX = 3;
+ private static int INV_DATA_INDEX = 4;
+ private static int FLATTENED_2_ROOT_INDEX = 5;
+
+ private final BitmapInvertedIndexReader invertedIndexReader;
+ private final StringDictionary dictionary;
+ private final long cardinality;
+ private final long numFlattenedDocs;
+ private final PinotDataBuffer flattened2RootDocIdBuffer;
+
+ public JSONIndexReader(PinotDataBuffer pinotDataBuffer) {
+
+ int version = pinotDataBuffer.getInt(0);
+ int maxKeyLength = pinotDataBuffer.getInt(1 * Integer.BYTES);
+
+ Preconditions.checkState(version == EXPECTED_VERSION, String
+ .format("Index version:{} is not supported by this reader. expected version:{}", version, EXPECTED_VERSION));
+
+ // dictionaryHeaderFile, dictionaryOffsetFile, dictionaryFile, invIndexOffsetFile, invIndexFile, FlattenedDocId2DocIdMappingFile
+ int numBuffers = 6;
+ long bufferStartOffsets[] = new long[numBuffers];
+ long bufferSizeArray[] = new long[numBuffers];
+ for (int i = 0; i < numBuffers; i++) {
+ bufferStartOffsets[i] = pinotDataBuffer.getLong(2 * Integer.BYTES + 2 * i * Long.BYTES);
+ bufferSizeArray[i] = pinotDataBuffer.getLong(2 * Integer.BYTES + 2 * i * Long.BYTES + Long.BYTES);
+ }
+ cardinality = bufferSizeArray[DICT_OFFSET_INDEX] / Integer.BYTES - 1;
+
+ long dictionaryStartOffset = bufferStartOffsets[DICT_HEADER_INDEX];
+ long dictionarySize =
+ bufferSizeArray[DICT_HEADER_INDEX] + bufferSizeArray[DICT_OFFSET_INDEX] + bufferSizeArray[DICT_DATA_INDEX];
+
+ //TODO: REMOVE DEBUG START
+ byte[] dictHeaderBytes = new byte[(int) bufferSizeArray[DICT_HEADER_INDEX]];
+ pinotDataBuffer.copyTo(bufferStartOffsets[DICT_HEADER_INDEX], dictHeaderBytes);
+ System.out.println("Arrays.toString(dictHeaderBytes) = " + Arrays.toString(dictHeaderBytes));
+ //TODO: REMOVE DEBUG END
+
+ PinotDataBuffer dictionaryBuffer =
+ pinotDataBuffer.view(dictionaryStartOffset, dictionaryStartOffset + dictionarySize);
+ dictionary = new StringDictionary(dictionaryBuffer, (int) cardinality, maxKeyLength, Byte.valueOf("0"));
+
+ long invIndexStartOffset = bufferStartOffsets[INV_OFFSET_INDEX];
+ long invIndexSize = bufferSizeArray[INV_OFFSET_INDEX] + bufferSizeArray[INV_DATA_INDEX];
+
+ PinotDataBuffer invIndexBuffer = pinotDataBuffer.view(invIndexStartOffset, invIndexStartOffset + invIndexSize);
+ invertedIndexReader = new BitmapInvertedIndexReader(invIndexBuffer, (int) cardinality);
+
+ //TODO: REMOVE DEBUG START
+ for (int dictId = 0; dictId < dictionary.length(); dictId++) {
+ System.out.println("Key = " + new String(dictionary.getBytes(dictId)));
+ System.out.println("Posting List = " + invertedIndexReader.getDocIds(dictId));
+ }
+ //TODO: REMOVE DEBUG END
+
+ long flattened2RootDocIdStartOffset = bufferStartOffsets[FLATTENED_2_ROOT_INDEX];
+ long flattened2RootDocIdBufferSize = bufferSizeArray[FLATTENED_2_ROOT_INDEX];
+ numFlattenedDocs = bufferSizeArray[FLATTENED_2_ROOT_INDEX] / Integer.BYTES;
+ flattened2RootDocIdBuffer = pinotDataBuffer.view(flattened2RootDocIdStartOffset, flattened2RootDocIdStartOffset + flattened2RootDocIdBufferSize);
+ //TODO: REMOVE DEBUG START
+ for (int i = 0; i < numFlattenedDocs; i++) {
+ System.out.println("flattenedDocId: " + i + " rootDodId:" + flattened2RootDocIdBuffer.getInt(i * Integer.BYTES));
+ }
+ //TODO: REMOVE DEBUG END
+ }
+
+ /**
+ * Returns the matching document ids for the given search query.
+ */
+ public MutableRoaringBitmap getMatchingDocIds(Predicate predicate) {
+
+ PredicateEvaluator predicateEvaluator =
+ PredicateEvaluatorProvider.getPredicateEvaluator(predicate, dictionary, FieldSpec.DataType.BYTES);
+ boolean exclusive = predicateEvaluator.isExclusive();
+ int[] dictIds = exclusive ? predicateEvaluator.getNonMatchingDictIds() : predicateEvaluator.getMatchingDictIds();
+ int numDictIds = dictIds.length;
+
+ if (numDictIds == 1) {
+ ImmutableRoaringBitmap docIds = (ImmutableRoaringBitmap) invertedIndexReader.getDocIds(dictIds[0]);
+ if (exclusive) {
+ if (docIds instanceof MutableRoaringBitmap) {
+ MutableRoaringBitmap mutableRoaringBitmap = (MutableRoaringBitmap) docIds;
+ mutableRoaringBitmap.flip(0L, numFlattenedDocs);
+ return mutableRoaringBitmap;
+ } else {
+ return ImmutableRoaringBitmap.flip(docIds, 0L, numFlattenedDocs);
+ }
+ } else {
+ return docIds.toMutableRoaringBitmap();
+ }
+ } else {
+ ImmutableRoaringBitmap[] bitmaps = new ImmutableRoaringBitmap[numDictIds];
+ for (int i = 0; i < numDictIds; i++) {
+ bitmaps[i] = (ImmutableRoaringBitmap) invertedIndexReader.getDocIds(dictIds[i]);
+ }
+ MutableRoaringBitmap docIds = ImmutableRoaringBitmap.or(bitmaps);
+ if (exclusive) {
+ docIds.flip(0L, numFlattenedDocs);
+ }
+ return docIds;
+ }
+ }
+
+ @Override
+ public void close()
+ throws IOException {
+
+ }
+
+ public int getRootDocId(int flattenedDocId) {
+ return flattened2RootDocIdBuffer.getInt(flattenedDocId * Integer.BYTES);
+ }
+}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/store/ColumnIndexType.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/store/ColumnIndexType.java
index dcd21df..f9063b8 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/store/ColumnIndexType.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/store/ColumnIndexType.java
@@ -25,7 +25,8 @@ public enum ColumnIndexType {
BLOOM_FILTER("bloom_filter"),
NULLVALUE_VECTOR("nullvalue_vector"),
TEXT_INDEX("text_index"),
- RANGE_INDEX("range_index");
+ RANGE_INDEX("range_index"),
+ JSON_INDEX("json_index");
private final String indexName;
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/store/FilePerIndexDirectory.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/store/FilePerIndexDirectory.java
index 51dd2fb..4ffc133 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/store/FilePerIndexDirectory.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/store/FilePerIndexDirectory.java
@@ -26,6 +26,7 @@ import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.Map;
import org.apache.pinot.common.segment.ReadMode;
+import org.apache.pinot.core.segment.creator.impl.V1Constants;
import org.apache.pinot.core.segment.creator.impl.text.LuceneTextIndexCreator;
import org.apache.pinot.core.segment.index.metadata.SegmentMetadataImpl;
import org.apache.pinot.core.segment.memory.PinotDataBuffer;
@@ -135,6 +136,10 @@ class FilePerIndexDirectory extends ColumnIndexDirectory {
case TEXT_INDEX:
filename = column + LuceneTextIndexCreator.LUCENE_TEXT_INDEX_FILE_EXTENSION;
break;
+ case JSON_INDEX:
+ filename = column + V1Constants.Indexes.JSON_INDEX_FILE_EXTENSION;
+ break;
+
default:
throw new UnsupportedOperationException("Unknown index type: " + indexType.toString());
}
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/segment/virtualcolumn/VirtualColumnIndexContainer.java b/pinot-core/src/main/java/org/apache/pinot/core/segment/virtualcolumn/VirtualColumnIndexContainer.java
index dc67771..712de29 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/segment/virtualcolumn/VirtualColumnIndexContainer.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/segment/virtualcolumn/VirtualColumnIndexContainer.java
@@ -24,6 +24,7 @@ import org.apache.pinot.core.segment.index.readers.BloomFilterReader;
import org.apache.pinot.core.segment.index.readers.Dictionary;
import org.apache.pinot.core.segment.index.readers.ForwardIndexReader;
import org.apache.pinot.core.segment.index.readers.InvertedIndexReader;
+import org.apache.pinot.core.segment.index.readers.JSONIndexReader;
import org.apache.pinot.core.segment.index.readers.NullValueVectorReaderImpl;
import org.apache.pinot.core.segment.index.readers.TextIndexReader;
@@ -79,6 +80,11 @@ public class VirtualColumnIndexContainer implements ColumnIndexContainer {
}
@Override
+ public JSONIndexReader getJSONIndex() {
+ return null;
+ }
+
+ @Override
public void close()
throws IOException {
_forwardIndex.close();
diff --git a/pinot-core/src/main/java/org/apache/pinot/core/startree/v2/store/StarTreeDataSource.java b/pinot-core/src/main/java/org/apache/pinot/core/startree/v2/store/StarTreeDataSource.java
index c24fb04..868cb47 100644
--- a/pinot-core/src/main/java/org/apache/pinot/core/startree/v2/store/StarTreeDataSource.java
+++ b/pinot-core/src/main/java/org/apache/pinot/core/startree/v2/store/StarTreeDataSource.java
@@ -32,7 +32,8 @@ public class StarTreeDataSource extends BaseDataSource {
public StarTreeDataSource(FieldSpec fieldSpec, int numDocs, ForwardIndexReader<?> forwardIndex,
@Nullable Dictionary dictionary) {
- super(new StarTreeDataSourceMetadata(fieldSpec, numDocs), forwardIndex, dictionary, null, null, null, null, null);
+ super(new StarTreeDataSourceMetadata(fieldSpec, numDocs), forwardIndex, dictionary, null, null, null, null, null,
+ null);
}
private static final class StarTreeDataSourceMetadata implements DataSourceMetadata {
diff --git a/pinot-spi/src/main/java/org/apache/pinot/spi/config/table/IndexingConfig.java b/pinot-spi/src/main/java/org/apache/pinot/spi/config/table/IndexingConfig.java
index 3dd137b..7965654 100644
--- a/pinot-spi/src/main/java/org/apache/pinot/spi/config/table/IndexingConfig.java
+++ b/pinot-spi/src/main/java/org/apache/pinot/spi/config/table/IndexingConfig.java
@@ -27,6 +27,7 @@ import org.apache.pinot.spi.config.BaseJsonConfig;
public class IndexingConfig extends BaseJsonConfig {
private List<String> _invertedIndexColumns;
private List<String> _rangeIndexColumns;
+ private List<String> _jsonIndexColumns;
private boolean _autoGeneratedInvertedIndex;
private boolean _createInvertedIndexDuringSegmentGeneration;
private List<String> _sortedColumn;
@@ -71,6 +72,14 @@ public class IndexingConfig extends BaseJsonConfig {
_rangeIndexColumns = rangeIndexColumns;
}
+ public List<String> getJsonIndexColumns() {
+ return _jsonIndexColumns;
+ }
+
+ public void setJsonIndexColumns(List<String> jsonIndexColumns) {
+ _jsonIndexColumns = jsonIndexColumns;
+ }
+
public boolean isAutoGeneratedInvertedIndex() {
return _autoGeneratedInvertedIndex;
}
diff --git a/pinot-spi/src/main/java/org/apache/pinot/spi/data/FieldSpec.java b/pinot-spi/src/main/java/org/apache/pinot/spi/data/FieldSpec.java
index b04ecb7..c9c245b 100644
--- a/pinot-spi/src/main/java/org/apache/pinot/spi/data/FieldSpec.java
+++ b/pinot-spi/src/main/java/org/apache/pinot/spi/data/FieldSpec.java
@@ -337,7 +337,7 @@ public abstract class FieldSpec implements Comparable<FieldSpec> {
public enum DataType {
// LIST is for complex lists which is different from multi-value column of primitives
// STRUCT, MAP and LIST are composable to form a COMPLEX field
- INT, LONG, FLOAT, DOUBLE, BOOLEAN/* Stored as STRING */, STRING, BYTES, STRUCT, MAP, LIST;
+ INT, LONG, FLOAT, DOUBLE, BOOLEAN/* Stored as STRING */, STRING, BYTES, STRUCT, MAP, LIST, JSON;
/**
* Returns the data type stored in Pinot.
diff --git a/pinot-tools/src/main/java/org/apache/pinot/tools/admin/command/JSONQuickstart.java b/pinot-tools/src/main/java/org/apache/pinot/tools/admin/command/JSONQuickstart.java
new file mode 100644
index 0000000..3a5246d
--- /dev/null
+++ b/pinot-tools/src/main/java/org/apache/pinot/tools/admin/command/JSONQuickstart.java
@@ -0,0 +1,224 @@
+package org.apache.pinot.tools.admin.command;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.net.URL;
+import java.util.Random;
+import org.apache.commons.io.FileUtils;
+import org.apache.pinot.spi.data.readers.FileFormat;
+import org.apache.pinot.spi.plugin.PluginManager;
+import org.apache.pinot.tools.QuickstartTableRequest;
+
+
+public class JSONQuickstart {
+ private static final String TAB = "\t\t";
+ private static final String NEW_LINE = "\n";
+
+ public enum Color {
+ RESET("\u001B[0m"), GREEN("\u001B[32m"), YELLOW("\u001B[33m"), CYAN("\u001B[36m");
+
+ public String _code;
+
+ Color(String code) {
+ _code = code;
+ }
+ }
+
+ public static void printStatus(JSONQuickstart.Color color, String message) {
+ System.out.println(color._code + message + JSONQuickstart.Color.RESET._code);
+ }
+
+ public static String prettyPrintResponse(JsonNode response) {
+ StringBuilder responseBuilder = new StringBuilder();
+
+ // Sql Results
+ if (response.has("resultTable")) {
+ JsonNode columns = response.get("resultTable").get("dataSchema").get("columnNames");
+ int numColumns = columns.size();
+ for (int i = 0; i < numColumns; i++) {
+ responseBuilder.append(columns.get(i).asText()).append(TAB);
+ }
+ responseBuilder.append(NEW_LINE);
+ JsonNode rows = response.get("resultTable").get("rows");
+ for (int i = 0; i < rows.size(); i++) {
+ JsonNode row = rows.get(i);
+ for (int j = 0; j < numColumns; j++) {
+ responseBuilder.append(row.get(j).asText()).append(TAB);
+ }
+ responseBuilder.append(NEW_LINE);
+ }
+ return responseBuilder.toString();
+ }
+
+ // Selection query
+ if (response.has("selectionResults")) {
+ JsonNode columns = response.get("selectionResults").get("columns");
+ int numColumns = columns.size();
+ for (int i = 0; i < numColumns; i++) {
+ responseBuilder.append(columns.get(i).asText()).append(TAB);
+ }
+ responseBuilder.append(NEW_LINE);
+ JsonNode rows = response.get("selectionResults").get("results");
+ int numRows = rows.size();
+ for (int i = 0; i < numRows; i++) {
+ JsonNode row = rows.get(i);
+ for (int j = 0; j < numColumns; j++) {
+ responseBuilder.append(row.get(j).asText()).append(TAB);
+ }
+ responseBuilder.append(NEW_LINE);
+ }
+ return responseBuilder.toString();
+ }
+
+ // Aggregation only query
+ if (!response.get("aggregationResults").get(0).has("groupByResult")) {
+ JsonNode aggregationResults = response.get("aggregationResults");
+ int numAggregations = aggregationResults.size();
+ for (int i = 0; i < numAggregations; i++) {
+ responseBuilder.append(aggregationResults.get(i).get("function").asText()).append(TAB);
+ }
+ responseBuilder.append(NEW_LINE);
+ for (int i = 0; i < numAggregations; i++) {
+ responseBuilder.append(aggregationResults.get(i).get("value").asText()).append(TAB);
+ }
+ responseBuilder.append(NEW_LINE);
+ return responseBuilder.toString();
+ }
+
+ // Aggregation group-by query
+ JsonNode groupByResults = response.get("aggregationResults");
+ int numGroupBys = groupByResults.size();
+ for (int i = 0; i < numGroupBys; i++) {
+ JsonNode groupByResult = groupByResults.get(i);
+ responseBuilder.append(groupByResult.get("function").asText()).append(TAB);
+ JsonNode columns = groupByResult.get("groupByColumns");
+ int numColumns = columns.size();
+ for (int j = 0; j < numColumns; j++) {
+ responseBuilder.append(columns.get(j).asText()).append(TAB);
+ }
+ responseBuilder.append(NEW_LINE);
+ JsonNode rows = groupByResult.get("groupByResult");
+ int numRows = rows.size();
+ for (int j = 0; j < numRows; j++) {
+ JsonNode row = rows.get(j);
+ responseBuilder.append(row.get("value").asText()).append(TAB);
+ JsonNode columnValues = row.get("group");
+ for (int k = 0; k < numColumns; k++) {
+ responseBuilder.append(columnValues.get(k).asText()).append(TAB);
+ }
+ responseBuilder.append(NEW_LINE);
+ }
+ }
+ return responseBuilder.toString();
+ }
+
+ public void execute()
+ throws Exception {
+ File quickstartTmpDir = new File(FileUtils.getTempDirectory(), String.valueOf(System.currentTimeMillis()));
+ File configDir = new File(quickstartTmpDir, "configs");
+ File dataDir = new File(quickstartTmpDir, "data");
+ Preconditions.checkState(configDir.mkdirs());
+ Preconditions.checkState(dataDir.mkdirs());
+
+ File schemaFile = new File(configDir, "super_schema.json");
+ File dataFile = new File(configDir, "super_data.csv");
+ File tableConfigFile = new File(configDir, "super_offline_table_config.json");
+ File ingestionJobSpecFile = new File(configDir, "ingestionJobSpec.yaml");
+
+ ClassLoader classLoader = JSONQuickstart.class.getClassLoader();
+ URL resource = classLoader.getResource("examples/batch/super/super_schema.json");
+ com.google.common.base.Preconditions.checkNotNull(resource);
+ FileUtils.copyURLToFile(resource, schemaFile);
+ resource = classLoader.getResource("examples/batch/super/rawdata/super_data.csv");
+ com.google.common.base.Preconditions.checkNotNull(resource);
+ FileUtils.copyURLToFile(resource, dataFile);
+ resource = classLoader.getResource("examples/batch/super/ingestionJobSpec.yaml");
+ com.google.common.base.Preconditions.checkNotNull(resource);
+ FileUtils.copyURLToFile(resource, ingestionJobSpecFile);
+ resource = classLoader.getResource("examples/batch/super/super_offline_table_config.json");
+ com.google.common.base.Preconditions.checkNotNull(resource);
+ FileUtils.copyURLToFile(resource, tableConfigFile);
+
+ QuickstartTableRequest request =
+ new QuickstartTableRequest("super", schemaFile, tableConfigFile, ingestionJobSpecFile, FileFormat.CSV);
+ final QuickstartRunner runner = new QuickstartRunner(Lists.newArrayList(request), 1, 1, 1, dataDir);
+
+ printStatus(JSONQuickstart.Color.CYAN, "***** Starting Zookeeper, controller, broker and server *****");
+ runner.startAll();
+ Runtime.getRuntime().addShutdownHook(new Thread(() -> {
+ try {
+ printStatus(JSONQuickstart.Color.GREEN, "***** Shutting down offline quick start *****");
+ runner.stop();
+ FileUtils.deleteDirectory(quickstartTmpDir);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }));
+ printStatus(JSONQuickstart.Color.CYAN, "***** Adding super table *****");
+ runner.addTable();
+ printStatus(JSONQuickstart.Color.CYAN,
+ "***** Launch data ingestion job to build index segment for super and push to controller *****");
+ runner.launchDataIngestionJob();
+ printStatus(JSONQuickstart.Color.CYAN,
+ "***** Waiting for 5 seconds for the server to fetch the assigned segment *****");
+ Thread.sleep(5000);
+
+ printStatus(JSONQuickstart.Color.YELLOW, "***** Offline quickstart setup complete *****");
+
+ }
+
+ public static void main(String[] args)
+ throws Exception {
+ PluginManager.get().init();
+// generateFile(new File(
+// "pinot-tools/src/main/resources/examples/batch/super/rawdata/super_data.csv"));
+
+ new JSONQuickstart().execute();
+ }
+
+ static void generateFile(File file)
+ throws IOException {
+ if(false) {
+ return;
+ }
+ ObjectMapper mapper = new ObjectMapper();
+ Random random = new Random();
+ file.delete();
+ BufferedWriter bw = new BufferedWriter(new FileWriter(file, false));
+// CSVPrinter csvPrinter = new CSVPrinter(bw, CSVFormat.newFormat(',').withRecordSeparator('\n'));
+// csvPrinter.printRecord("person");
+ bw.write("person");
+ bw.newLine();
+ for (int i = 0; i < 100; i++) {
+ ObjectNode person = mapper.createObjectNode();
+ person.put("name", "adam-" + i);
+ person.put("age", random.nextInt(100));
+ ArrayNode arrayNode = mapper.createArrayNode();
+ for (int j = 0; j < 3; j++) {
+ ObjectNode address = mapper.createObjectNode();
+ address.put("street", "street-" + i);
+ address.put("country", "us");
+ arrayNode.add(address);
+ }
+ person.set("addresses", arrayNode);
+ String personJSON = mapper.writer().writeValueAsString(person);
+// csvPrinter.printRecord(personJSON.replaceAll("\"", "\"\""));
+ bw.write("\"");
+ bw.write(personJSON.replaceAll("\"", "\"\""));
+ bw.write("\"");
+ bw.newLine();
+
+ }
+// csvPrinter.close();
+// System.out.println("csvPrinter = " + csvPrinter);
+ bw.close();
+ }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@pinot.apache.org
For additional commands, e-mail: commits-help@pinot.apache.org