You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@drill.apache.org by ja...@apache.org on 2014/05/12 04:48:23 UTC
[01/10] git commit: DRILL-689: Disable dense decimal data type
Repository: incubator-drill
Updated Branches:
refs/heads/master 45e14df5a -> cdc5daed5
DRILL-689: Disable dense decimal data type
Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/4ffef018
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/4ffef018
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/4ffef018
Branch: refs/heads/master
Commit: 4ffef0183cbe336e30503ceee647b21128ba7738
Parents: 45e14df
Author: Mehant Baid <me...@gmail.com>
Authored: Sun May 11 02:08:31 2014 -0700
Committer: Jacques Nadeau <ja...@apache.org>
Committed: Sun May 11 16:09:23 2014 -0700
----------------------------------------------------------------------
.../templates/DecimalAggrTypeFunctions1.java | 10 +++++++---
.../drill/exec/planner/logical/DrillOptiq.java | 18 ++++++------------
2 files changed, 13 insertions(+), 15 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/4ffef018/exec/java-exec/src/main/codegen/templates/DecimalAggrTypeFunctions1.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/DecimalAggrTypeFunctions1.java b/exec/java-exec/src/main/codegen/templates/DecimalAggrTypeFunctions1.java
index b899986..5e02ff0 100644
--- a/exec/java-exec/src/main/codegen/templates/DecimalAggrTypeFunctions1.java
+++ b/exec/java-exec/src/main/codegen/templates/DecimalAggrTypeFunctions1.java
@@ -68,14 +68,18 @@ public static class ${type.inputType}${aggrtype.className} implements DrillAggFu
buffer = new io.netty.buffer.SwappedByteBuf(buffer);
value.buffer = buffer;
value.start = 0;
+ <#if aggrtype.funcName == "max">
for (int i = 0; i < value.nDecimalDigits; i++) {
value.setInteger(i, 0xFFFFFFFF);
}
- <#if aggrtype.funcName == "min">
+ value.sign = true;
+ <#elseif aggrtype.funcName == "min">
+ for (int i = 0; i < value.nDecimalDigits; i++) {
+ value.setInteger(i, 0x7FFFFFFF);
+ }
// Set sign to be positive so initial value is maximum
value.sign = false;
- <#elseif aggrtype.funcName == "max">
- value.sign = true;
+ value.precision = ${type.runningType}Holder.maxPrecision;
</#if>
<#elseif type.outputType == "Decimal9" || type.outputType == "Decimal18">
value.value = ${type.initValue};
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/4ffef018/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
index 73e835a..7efd714 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
@@ -230,20 +230,16 @@ public class DrillOptiq {
int scale = call.getType().getScale();
if (precision <= 9) {
- castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL9).setPrecision(precision).setScale(scale).build();
+ castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL9).setPrecision(precision).setScale(scale).build();
} else if (precision <= 18) {
- castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL18).setPrecision(precision).setScale(scale).build();
+ castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL18).setPrecision(precision).setScale(scale).build();
} else if (precision <= 28) {
- // Inject a cast to SPARSE before casting to the dense type.
- castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL28SPARSE).setPrecision(precision).setScale(scale).build();
- arg = FunctionCallFactory.createCast(castType, ExpressionPosition.UNKNOWN, arg);
- castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL28DENSE).setPrecision(precision).setScale(scale).build();
+ // Inject a cast to SPARSE before casting to the dense type.
+ castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL28SPARSE).setPrecision(precision).setScale(scale).build();
} else if (precision <= 38) {
- castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL38SPARSE).setPrecision(precision).setScale(scale).build();
- arg = FunctionCallFactory.createCast(castType, ExpressionPosition.UNKNOWN, arg);
- castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL38DENSE).setPrecision(precision).setScale(scale).build();
+ castType = TypeProtos.MajorType.newBuilder().setMinorType(MinorType.DECIMAL38SPARSE).setPrecision(precision).setScale(scale).build();
} else {
- throw new UnsupportedOperationException("Only Decimal types with precision range 0 - 38 is supported");
+ throw new UnsupportedOperationException("Only Decimal types with precision range 0 - 38 is supported");
}
break;
@@ -252,9 +248,7 @@ public class DrillOptiq {
case "ANY": return arg; // Type will be same as argument.
default: castType = Types.required(MinorType.valueOf(call.getType().getSqlTypeName().getName()));
}
-
return FunctionCallFactory.createCast(castType, ExpressionPosition.UNKNOWN, arg);
-
}
private LogicalExpression getDrillFunctionFromOptiqCall(RexCall call) {
[04/10] git commit: Add support for RepeatedMapVector, MapVector and RepeatedListVector.
Posted to dev@drill.apache.org by ja...@apache.org
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/protocol/src/main/java/org/apache/drill/exec/proto/SchemaDefProtos.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/SchemaDefProtos.java b/protocol/src/main/java/org/apache/drill/exec/proto/SchemaDefProtos.java
index 0093789..d7f3536 100644
--- a/protocol/src/main/java/org/apache/drill/exec/proto/SchemaDefProtos.java
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/SchemaDefProtos.java
@@ -116,1748 +116,6 @@ public final class SchemaDefProtos {
// @@protoc_insertion_point(enum_scope:exec.ValueMode)
}
- public interface NamePartOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // optional .exec.NamePart.Type type = 1;
- /**
- * <code>optional .exec.NamePart.Type type = 1;</code>
- */
- boolean hasType();
- /**
- * <code>optional .exec.NamePart.Type type = 1;</code>
- */
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type getType();
-
- // optional string name = 2;
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- boolean hasName();
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- java.lang.String getName();
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- com.google.protobuf.ByteString
- getNameBytes();
- }
- /**
- * Protobuf type {@code exec.NamePart}
- */
- public static final class NamePart extends
- com.google.protobuf.GeneratedMessage
- implements NamePartOrBuilder {
- // Use NamePart.newBuilder() to construct.
- private NamePart(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
- super(builder);
- this.unknownFields = builder.getUnknownFields();
- }
- private NamePart(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final NamePart defaultInstance;
- public static NamePart getDefaultInstance() {
- return defaultInstance;
- }
-
- public NamePart getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private NamePart(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- case 8: {
- int rawValue = input.readEnum();
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type value = org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type.valueOf(rawValue);
- if (value == null) {
- unknownFields.mergeVarintField(1, rawValue);
- } else {
- bitField0_ |= 0x00000001;
- type_ = value;
- }
- break;
- }
- case 18: {
- bitField0_ |= 0x00000002;
- name_ = input.readBytes();
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.drill.exec.proto.SchemaDefProtos.internal_static_exec_NamePart_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.drill.exec.proto.SchemaDefProtos.internal_static_exec_NamePart_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart.class, org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder.class);
- }
-
- public static com.google.protobuf.Parser<NamePart> PARSER =
- new com.google.protobuf.AbstractParser<NamePart>() {
- public NamePart parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new NamePart(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<NamePart> getParserForType() {
- return PARSER;
- }
-
- /**
- * Protobuf enum {@code exec.NamePart.Type}
- */
- public enum Type
- implements com.google.protobuf.ProtocolMessageEnum {
- /**
- * <code>NAME = 0;</code>
- */
- NAME(0, 0),
- /**
- * <code>ARRAY = 1;</code>
- */
- ARRAY(1, 1),
- ;
-
- /**
- * <code>NAME = 0;</code>
- */
- public static final int NAME_VALUE = 0;
- /**
- * <code>ARRAY = 1;</code>
- */
- public static final int ARRAY_VALUE = 1;
-
-
- public final int getNumber() { return value; }
-
- public static Type valueOf(int value) {
- switch (value) {
- case 0: return NAME;
- case 1: return ARRAY;
- default: return null;
- }
- }
-
- public static com.google.protobuf.Internal.EnumLiteMap<Type>
- internalGetValueMap() {
- return internalValueMap;
- }
- private static com.google.protobuf.Internal.EnumLiteMap<Type>
- internalValueMap =
- new com.google.protobuf.Internal.EnumLiteMap<Type>() {
- public Type findValueByNumber(int number) {
- return Type.valueOf(number);
- }
- };
-
- public final com.google.protobuf.Descriptors.EnumValueDescriptor
- getValueDescriptor() {
- return getDescriptor().getValues().get(index);
- }
- public final com.google.protobuf.Descriptors.EnumDescriptor
- getDescriptorForType() {
- return getDescriptor();
- }
- public static final com.google.protobuf.Descriptors.EnumDescriptor
- getDescriptor() {
- return org.apache.drill.exec.proto.SchemaDefProtos.NamePart.getDescriptor().getEnumTypes().get(0);
- }
-
- private static final Type[] VALUES = values();
-
- public static Type valueOf(
- com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
- if (desc.getType() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "EnumValueDescriptor is not for this type.");
- }
- return VALUES[desc.getIndex()];
- }
-
- private final int index;
- private final int value;
-
- private Type(int index, int value) {
- this.index = index;
- this.value = value;
- }
-
- // @@protoc_insertion_point(enum_scope:exec.NamePart.Type)
- }
-
- private int bitField0_;
- // optional .exec.NamePart.Type type = 1;
- public static final int TYPE_FIELD_NUMBER = 1;
- private org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type type_;
- /**
- * <code>optional .exec.NamePart.Type type = 1;</code>
- */
- public boolean hasType() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>optional .exec.NamePart.Type type = 1;</code>
- */
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type getType() {
- return type_;
- }
-
- // optional string name = 2;
- public static final int NAME_FIELD_NUMBER = 2;
- private java.lang.Object name_;
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- public boolean hasName() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- public java.lang.String getName() {
- java.lang.Object ref = name_;
- if (ref instanceof java.lang.String) {
- return (java.lang.String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- java.lang.String s = bs.toStringUtf8();
- if (bs.isValidUtf8()) {
- name_ = s;
- }
- return s;
- }
- }
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- public com.google.protobuf.ByteString
- getNameBytes() {
- java.lang.Object ref = name_;
- if (ref instanceof java.lang.String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- name_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- private void initFields() {
- type_ = org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type.NAME;
- name_ = "";
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeEnum(1, type_.getNumber());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeBytes(2, getNameBytes());
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeEnumSize(1, type_.getNumber());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, getNameBytes());
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- public static org.apache.drill.exec.proto.SchemaDefProtos.NamePart parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.NamePart parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.NamePart parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.NamePart parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.NamePart parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.NamePart parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.NamePart parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.NamePart parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.NamePart parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.NamePart parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.drill.exec.proto.SchemaDefProtos.NamePart prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- /**
- * Protobuf type {@code exec.NamePart}
- */
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.drill.exec.proto.SchemaDefProtos.NamePartOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.drill.exec.proto.SchemaDefProtos.internal_static_exec_NamePart_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.drill.exec.proto.SchemaDefProtos.internal_static_exec_NamePart_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart.class, org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder.class);
- }
-
- // Construct using org.apache.drill.exec.proto.SchemaDefProtos.NamePart.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- type_ = org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type.NAME;
- bitField0_ = (bitField0_ & ~0x00000001);
- name_ = "";
- bitField0_ = (bitField0_ & ~0x00000002);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.drill.exec.proto.SchemaDefProtos.internal_static_exec_NamePart_descriptor;
- }
-
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePart getDefaultInstanceForType() {
- return org.apache.drill.exec.proto.SchemaDefProtos.NamePart.getDefaultInstance();
- }
-
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePart build() {
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePart buildPartial() {
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart result = new org.apache.drill.exec.proto.SchemaDefProtos.NamePart(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- result.type_ = type_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000002;
- }
- result.name_ = name_;
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.drill.exec.proto.SchemaDefProtos.NamePart) {
- return mergeFrom((org.apache.drill.exec.proto.SchemaDefProtos.NamePart)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.drill.exec.proto.SchemaDefProtos.NamePart other) {
- if (other == org.apache.drill.exec.proto.SchemaDefProtos.NamePart.getDefaultInstance()) return this;
- if (other.hasType()) {
- setType(other.getType());
- }
- if (other.hasName()) {
- bitField0_ |= 0x00000002;
- name_ = other.name_;
- onChanged();
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.drill.exec.proto.SchemaDefProtos.NamePart) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
- }
- }
- return this;
- }
- private int bitField0_;
-
- // optional .exec.NamePart.Type type = 1;
- private org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type type_ = org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type.NAME;
- /**
- * <code>optional .exec.NamePart.Type type = 1;</code>
- */
- public boolean hasType() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>optional .exec.NamePart.Type type = 1;</code>
- */
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type getType() {
- return type_;
- }
- /**
- * <code>optional .exec.NamePart.Type type = 1;</code>
- */
- public Builder setType(org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- type_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>optional .exec.NamePart.Type type = 1;</code>
- */
- public Builder clearType() {
- bitField0_ = (bitField0_ & ~0x00000001);
- type_ = org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type.NAME;
- onChanged();
- return this;
- }
-
- // optional string name = 2;
- private java.lang.Object name_ = "";
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- public boolean hasName() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- public java.lang.String getName() {
- java.lang.Object ref = name_;
- if (!(ref instanceof java.lang.String)) {
- java.lang.String s = ((com.google.protobuf.ByteString) ref)
- .toStringUtf8();
- name_ = s;
- return s;
- } else {
- return (java.lang.String) ref;
- }
- }
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- public com.google.protobuf.ByteString
- getNameBytes() {
- java.lang.Object ref = name_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8(
- (java.lang.String) ref);
- name_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- public Builder setName(
- java.lang.String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000002;
- name_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- public Builder clearName() {
- bitField0_ = (bitField0_ & ~0x00000002);
- name_ = getDefaultInstance().getName();
- onChanged();
- return this;
- }
- /**
- * <code>optional string name = 2;</code>
- *
- * <pre>
- * only required if this is a named type.
- * </pre>
- */
- public Builder setNameBytes(
- com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000002;
- name_ = value;
- onChanged();
- return this;
- }
-
- // @@protoc_insertion_point(builder_scope:exec.NamePart)
- }
-
- static {
- defaultInstance = new NamePart(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:exec.NamePart)
- }
-
- public interface FieldDefOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // repeated .exec.NamePart name = 1;
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- java.util.List<org.apache.drill.exec.proto.SchemaDefProtos.NamePart>
- getNameList();
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart getName(int index);
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- int getNameCount();
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- java.util.List<? extends org.apache.drill.exec.proto.SchemaDefProtos.NamePartOrBuilder>
- getNameOrBuilderList();
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- org.apache.drill.exec.proto.SchemaDefProtos.NamePartOrBuilder getNameOrBuilder(
- int index);
-
- // optional .common.MajorType major_type = 2;
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- boolean hasMajorType();
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- org.apache.drill.common.types.TypeProtos.MajorType getMajorType();
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- org.apache.drill.common.types.TypeProtos.MajorTypeOrBuilder getMajorTypeOrBuilder();
- }
- /**
- * Protobuf type {@code exec.FieldDef}
- */
- public static final class FieldDef extends
- com.google.protobuf.GeneratedMessage
- implements FieldDefOrBuilder {
- // Use FieldDef.newBuilder() to construct.
- private FieldDef(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
- super(builder);
- this.unknownFields = builder.getUnknownFields();
- }
- private FieldDef(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
- private static final FieldDef defaultInstance;
- public static FieldDef getDefaultInstance() {
- return defaultInstance;
- }
-
- public FieldDef getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- private final com.google.protobuf.UnknownFieldSet unknownFields;
- @java.lang.Override
- public final com.google.protobuf.UnknownFieldSet
- getUnknownFields() {
- return this.unknownFields;
- }
- private FieldDef(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- initFields();
- int mutable_bitField0_ = 0;
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder();
- try {
- boolean done = false;
- while (!done) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- done = true;
- break;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- done = true;
- }
- break;
- }
- case 10: {
- if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
- name_ = new java.util.ArrayList<org.apache.drill.exec.proto.SchemaDefProtos.NamePart>();
- mutable_bitField0_ |= 0x00000001;
- }
- name_.add(input.readMessage(org.apache.drill.exec.proto.SchemaDefProtos.NamePart.PARSER, extensionRegistry));
- break;
- }
- case 18: {
- org.apache.drill.common.types.TypeProtos.MajorType.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- subBuilder = majorType_.toBuilder();
- }
- majorType_ = input.readMessage(org.apache.drill.common.types.TypeProtos.MajorType.PARSER, extensionRegistry);
- if (subBuilder != null) {
- subBuilder.mergeFrom(majorType_);
- majorType_ = subBuilder.buildPartial();
- }
- bitField0_ |= 0x00000001;
- break;
- }
- }
- }
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- throw e.setUnfinishedMessage(this);
- } catch (java.io.IOException e) {
- throw new com.google.protobuf.InvalidProtocolBufferException(
- e.getMessage()).setUnfinishedMessage(this);
- } finally {
- if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
- name_ = java.util.Collections.unmodifiableList(name_);
- }
- this.unknownFields = unknownFields.build();
- makeExtensionsImmutable();
- }
- }
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.drill.exec.proto.SchemaDefProtos.internal_static_exec_FieldDef_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.drill.exec.proto.SchemaDefProtos.internal_static_exec_FieldDef_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.drill.exec.proto.SchemaDefProtos.FieldDef.class, org.apache.drill.exec.proto.SchemaDefProtos.FieldDef.Builder.class);
- }
-
- public static com.google.protobuf.Parser<FieldDef> PARSER =
- new com.google.protobuf.AbstractParser<FieldDef>() {
- public FieldDef parsePartialFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return new FieldDef(input, extensionRegistry);
- }
- };
-
- @java.lang.Override
- public com.google.protobuf.Parser<FieldDef> getParserForType() {
- return PARSER;
- }
-
- private int bitField0_;
- // repeated .exec.NamePart name = 1;
- public static final int NAME_FIELD_NUMBER = 1;
- private java.util.List<org.apache.drill.exec.proto.SchemaDefProtos.NamePart> name_;
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public java.util.List<org.apache.drill.exec.proto.SchemaDefProtos.NamePart> getNameList() {
- return name_;
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public java.util.List<? extends org.apache.drill.exec.proto.SchemaDefProtos.NamePartOrBuilder>
- getNameOrBuilderList() {
- return name_;
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public int getNameCount() {
- return name_.size();
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePart getName(int index) {
- return name_.get(index);
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePartOrBuilder getNameOrBuilder(
- int index) {
- return name_.get(index);
- }
-
- // optional .common.MajorType major_type = 2;
- public static final int MAJOR_TYPE_FIELD_NUMBER = 2;
- private org.apache.drill.common.types.TypeProtos.MajorType majorType_;
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- public boolean hasMajorType() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- public org.apache.drill.common.types.TypeProtos.MajorType getMajorType() {
- return majorType_;
- }
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- public org.apache.drill.common.types.TypeProtos.MajorTypeOrBuilder getMajorTypeOrBuilder() {
- return majorType_;
- }
-
- private void initFields() {
- name_ = java.util.Collections.emptyList();
- majorType_ = org.apache.drill.common.types.TypeProtos.MajorType.getDefaultInstance();
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- for (int i = 0; i < name_.size(); i++) {
- output.writeMessage(1, name_.get(i));
- }
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeMessage(2, majorType_);
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- for (int i = 0; i < name_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, name_.get(i));
- }
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(2, majorType_);
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- public static org.apache.drill.exec.proto.SchemaDefProtos.FieldDef parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.FieldDef parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.FieldDef parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.FieldDef parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return PARSER.parseFrom(data, extensionRegistry);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.FieldDef parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.FieldDef parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.FieldDef parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.FieldDef parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseDelimitedFrom(input, extensionRegistry);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.FieldDef parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return PARSER.parseFrom(input);
- }
- public static org.apache.drill.exec.proto.SchemaDefProtos.FieldDef parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return PARSER.parseFrom(input, extensionRegistry);
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.drill.exec.proto.SchemaDefProtos.FieldDef prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- /**
- * Protobuf type {@code exec.FieldDef}
- */
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.drill.exec.proto.SchemaDefProtos.FieldDefOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.drill.exec.proto.SchemaDefProtos.internal_static_exec_FieldDef_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.drill.exec.proto.SchemaDefProtos.internal_static_exec_FieldDef_fieldAccessorTable
- .ensureFieldAccessorsInitialized(
- org.apache.drill.exec.proto.SchemaDefProtos.FieldDef.class, org.apache.drill.exec.proto.SchemaDefProtos.FieldDef.Builder.class);
- }
-
- // Construct using org.apache.drill.exec.proto.SchemaDefProtos.FieldDef.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- getNameFieldBuilder();
- getMajorTypeFieldBuilder();
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- if (nameBuilder_ == null) {
- name_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000001);
- } else {
- nameBuilder_.clear();
- }
- if (majorTypeBuilder_ == null) {
- majorType_ = org.apache.drill.common.types.TypeProtos.MajorType.getDefaultInstance();
- } else {
- majorTypeBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000002);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.drill.exec.proto.SchemaDefProtos.internal_static_exec_FieldDef_descriptor;
- }
-
- public org.apache.drill.exec.proto.SchemaDefProtos.FieldDef getDefaultInstanceForType() {
- return org.apache.drill.exec.proto.SchemaDefProtos.FieldDef.getDefaultInstance();
- }
-
- public org.apache.drill.exec.proto.SchemaDefProtos.FieldDef build() {
- org.apache.drill.exec.proto.SchemaDefProtos.FieldDef result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- public org.apache.drill.exec.proto.SchemaDefProtos.FieldDef buildPartial() {
- org.apache.drill.exec.proto.SchemaDefProtos.FieldDef result = new org.apache.drill.exec.proto.SchemaDefProtos.FieldDef(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (nameBuilder_ == null) {
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- name_ = java.util.Collections.unmodifiableList(name_);
- bitField0_ = (bitField0_ & ~0x00000001);
- }
- result.name_ = name_;
- } else {
- result.name_ = nameBuilder_.build();
- }
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000001;
- }
- if (majorTypeBuilder_ == null) {
- result.majorType_ = majorType_;
- } else {
- result.majorType_ = majorTypeBuilder_.build();
- }
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.drill.exec.proto.SchemaDefProtos.FieldDef) {
- return mergeFrom((org.apache.drill.exec.proto.SchemaDefProtos.FieldDef)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.drill.exec.proto.SchemaDefProtos.FieldDef other) {
- if (other == org.apache.drill.exec.proto.SchemaDefProtos.FieldDef.getDefaultInstance()) return this;
- if (nameBuilder_ == null) {
- if (!other.name_.isEmpty()) {
- if (name_.isEmpty()) {
- name_ = other.name_;
- bitField0_ = (bitField0_ & ~0x00000001);
- } else {
- ensureNameIsMutable();
- name_.addAll(other.name_);
- }
- onChanged();
- }
- } else {
- if (!other.name_.isEmpty()) {
- if (nameBuilder_.isEmpty()) {
- nameBuilder_.dispose();
- nameBuilder_ = null;
- name_ = other.name_;
- bitField0_ = (bitField0_ & ~0x00000001);
- nameBuilder_ =
- com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
- getNameFieldBuilder() : null;
- } else {
- nameBuilder_.addAllMessages(other.name_);
- }
- }
- }
- if (other.hasMajorType()) {
- mergeMajorType(other.getMajorType());
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- org.apache.drill.exec.proto.SchemaDefProtos.FieldDef parsedMessage = null;
- try {
- parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
- } catch (com.google.protobuf.InvalidProtocolBufferException e) {
- parsedMessage = (org.apache.drill.exec.proto.SchemaDefProtos.FieldDef) e.getUnfinishedMessage();
- throw e;
- } finally {
- if (parsedMessage != null) {
- mergeFrom(parsedMessage);
- }
- }
- return this;
- }
- private int bitField0_;
-
- // repeated .exec.NamePart name = 1;
- private java.util.List<org.apache.drill.exec.proto.SchemaDefProtos.NamePart> name_ =
- java.util.Collections.emptyList();
- private void ensureNameIsMutable() {
- if (!((bitField0_ & 0x00000001) == 0x00000001)) {
- name_ = new java.util.ArrayList<org.apache.drill.exec.proto.SchemaDefProtos.NamePart>(name_);
- bitField0_ |= 0x00000001;
- }
- }
-
- private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart, org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder, org.apache.drill.exec.proto.SchemaDefProtos.NamePartOrBuilder> nameBuilder_;
-
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public java.util.List<org.apache.drill.exec.proto.SchemaDefProtos.NamePart> getNameList() {
- if (nameBuilder_ == null) {
- return java.util.Collections.unmodifiableList(name_);
- } else {
- return nameBuilder_.getMessageList();
- }
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public int getNameCount() {
- if (nameBuilder_ == null) {
- return name_.size();
- } else {
- return nameBuilder_.getCount();
- }
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePart getName(int index) {
- if (nameBuilder_ == null) {
- return name_.get(index);
- } else {
- return nameBuilder_.getMessage(index);
- }
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public Builder setName(
- int index, org.apache.drill.exec.proto.SchemaDefProtos.NamePart value) {
- if (nameBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- ensureNameIsMutable();
- name_.set(index, value);
- onChanged();
- } else {
- nameBuilder_.setMessage(index, value);
- }
- return this;
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public Builder setName(
- int index, org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder builderForValue) {
- if (nameBuilder_ == null) {
- ensureNameIsMutable();
- name_.set(index, builderForValue.build());
- onChanged();
- } else {
- nameBuilder_.setMessage(index, builderForValue.build());
- }
- return this;
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public Builder addName(org.apache.drill.exec.proto.SchemaDefProtos.NamePart value) {
- if (nameBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- ensureNameIsMutable();
- name_.add(value);
- onChanged();
- } else {
- nameBuilder_.addMessage(value);
- }
- return this;
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public Builder addName(
- int index, org.apache.drill.exec.proto.SchemaDefProtos.NamePart value) {
- if (nameBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- ensureNameIsMutable();
- name_.add(index, value);
- onChanged();
- } else {
- nameBuilder_.addMessage(index, value);
- }
- return this;
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public Builder addName(
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder builderForValue) {
- if (nameBuilder_ == null) {
- ensureNameIsMutable();
- name_.add(builderForValue.build());
- onChanged();
- } else {
- nameBuilder_.addMessage(builderForValue.build());
- }
- return this;
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public Builder addName(
- int index, org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder builderForValue) {
- if (nameBuilder_ == null) {
- ensureNameIsMutable();
- name_.add(index, builderForValue.build());
- onChanged();
- } else {
- nameBuilder_.addMessage(index, builderForValue.build());
- }
- return this;
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public Builder addAllName(
- java.lang.Iterable<? extends org.apache.drill.exec.proto.SchemaDefProtos.NamePart> values) {
- if (nameBuilder_ == null) {
- ensureNameIsMutable();
- super.addAll(values, name_);
- onChanged();
- } else {
- nameBuilder_.addAllMessages(values);
- }
- return this;
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public Builder clearName() {
- if (nameBuilder_ == null) {
- name_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000001);
- onChanged();
- } else {
- nameBuilder_.clear();
- }
- return this;
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public Builder removeName(int index) {
- if (nameBuilder_ == null) {
- ensureNameIsMutable();
- name_.remove(index);
- onChanged();
- } else {
- nameBuilder_.remove(index);
- }
- return this;
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder getNameBuilder(
- int index) {
- return getNameFieldBuilder().getBuilder(index);
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePartOrBuilder getNameOrBuilder(
- int index) {
- if (nameBuilder_ == null) {
- return name_.get(index); } else {
- return nameBuilder_.getMessageOrBuilder(index);
- }
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public java.util.List<? extends org.apache.drill.exec.proto.SchemaDefProtos.NamePartOrBuilder>
- getNameOrBuilderList() {
- if (nameBuilder_ != null) {
- return nameBuilder_.getMessageOrBuilderList();
- } else {
- return java.util.Collections.unmodifiableList(name_);
- }
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder addNameBuilder() {
- return getNameFieldBuilder().addBuilder(
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart.getDefaultInstance());
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder addNameBuilder(
- int index) {
- return getNameFieldBuilder().addBuilder(
- index, org.apache.drill.exec.proto.SchemaDefProtos.NamePart.getDefaultInstance());
- }
- /**
- * <code>repeated .exec.NamePart name = 1;</code>
- *
- * <pre>
- * multipart description of entire field name
- * </pre>
- */
- public java.util.List<org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder>
- getNameBuilderList() {
- return getNameFieldBuilder().getBuilderList();
- }
- private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart, org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder, org.apache.drill.exec.proto.SchemaDefProtos.NamePartOrBuilder>
- getNameFieldBuilder() {
- if (nameBuilder_ == null) {
- nameBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
- org.apache.drill.exec.proto.SchemaDefProtos.NamePart, org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Builder, org.apache.drill.exec.proto.SchemaDefProtos.NamePartOrBuilder>(
- name_,
- ((bitField0_ & 0x00000001) == 0x00000001),
- getParentForChildren(),
- isClean());
- name_ = null;
- }
- return nameBuilder_;
- }
-
- // optional .common.MajorType major_type = 2;
- private org.apache.drill.common.types.TypeProtos.MajorType majorType_ = org.apache.drill.common.types.TypeProtos.MajorType.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.drill.common.types.TypeProtos.MajorType, org.apache.drill.common.types.TypeProtos.MajorType.Builder, org.apache.drill.common.types.TypeProtos.MajorTypeOrBuilder> majorTypeBuilder_;
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- public boolean hasMajorType() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- public org.apache.drill.common.types.TypeProtos.MajorType getMajorType() {
- if (majorTypeBuilder_ == null) {
- return majorType_;
- } else {
- return majorTypeBuilder_.getMessage();
- }
- }
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- public Builder setMajorType(org.apache.drill.common.types.TypeProtos.MajorType value) {
- if (majorTypeBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- majorType_ = value;
- onChanged();
- } else {
- majorTypeBuilder_.setMessage(value);
- }
- bitField0_ |= 0x00000002;
- return this;
- }
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- public Builder setMajorType(
- org.apache.drill.common.types.TypeProtos.MajorType.Builder builderForValue) {
- if (majorTypeBuilder_ == null) {
- majorType_ = builderForValue.build();
- onChanged();
- } else {
- majorTypeBuilder_.setMessage(builderForValue.build());
- }
- bitField0_ |= 0x00000002;
- return this;
- }
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- public Builder mergeMajorType(org.apache.drill.common.types.TypeProtos.MajorType value) {
- if (majorTypeBuilder_ == null) {
- if (((bitField0_ & 0x00000002) == 0x00000002) &&
- majorType_ != org.apache.drill.common.types.TypeProtos.MajorType.getDefaultInstance()) {
- majorType_ =
- org.apache.drill.common.types.TypeProtos.MajorType.newBuilder(majorType_).mergeFrom(value).buildPartial();
- } else {
- majorType_ = value;
- }
- onChanged();
- } else {
- majorTypeBuilder_.mergeFrom(value);
- }
- bitField0_ |= 0x00000002;
- return this;
- }
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- public Builder clearMajorType() {
- if (majorTypeBuilder_ == null) {
- majorType_ = org.apache.drill.common.types.TypeProtos.MajorType.getDefaultInstance();
- onChanged();
- } else {
- majorTypeBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000002);
- return this;
- }
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- public org.apache.drill.common.types.TypeProtos.MajorType.Builder getMajorTypeBuilder() {
- bitField0_ |= 0x00000002;
- onChanged();
- return getMajorTypeFieldBuilder().getBuilder();
- }
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- public org.apache.drill.common.types.TypeProtos.MajorTypeOrBuilder getMajorTypeOrBuilder() {
- if (majorTypeBuilder_ != null) {
- return majorTypeBuilder_.getMessageOrBuilder();
- } else {
- return majorType_;
- }
- }
- /**
- * <code>optional .common.MajorType major_type = 2;</code>
- *
- * <pre>
- * the type associated with this field.
- * </pre>
- */
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.drill.common.types.TypeProtos.MajorType, org.apache.drill.common.types.TypeProtos.MajorType.Builder, org.apache.drill.common.types.TypeProtos.MajorTypeOrBuilder>
- getMajorTypeFieldBuilder() {
- if (majorTypeBuilder_ == null) {
- majorTypeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.drill.common.types.TypeProtos.MajorType, org.apache.drill.common.types.TypeProtos.MajorType.Builder, org.apache.drill.common.types.TypeProtos.MajorTypeOrBuilder>(
- majorType_,
- getParentForChildren(),
- isClean());
- majorType_ = null;
- }
- return majorTypeBuilder_;
- }
-
- // @@protoc_insertion_point(builder_scope:exec.FieldDef)
- }
-
- static {
- defaultInstance = new FieldDef(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:exec.FieldDef)
- }
-
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_exec_NamePart_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_exec_NamePart_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_exec_FieldDef_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_exec_FieldDef_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@@ -1867,32 +125,16 @@ public final class SchemaDefProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
- "\n\017SchemaDef.proto\022\004exec\032\013Types.proto\"X\n\010" +
- "NamePart\022!\n\004type\030\001 \001(\0162\023.exec.NamePart.T" +
- "ype\022\014\n\004name\030\002 \001(\t\"\033\n\004Type\022\010\n\004NAME\020\000\022\t\n\005A" +
- "RRAY\020\001\"O\n\010FieldDef\022\034\n\004name\030\001 \003(\0132\016.exec." +
- "NamePart\022%\n\nmajor_type\030\002 \001(\0132\021.common.Ma" +
- "jorType*0\n\tValueMode\022\020\n\014VALUE_VECTOR\020\000\022\007" +
- "\n\003RLE\020\001\022\010\n\004DICT\020\002B0\n\033org.apache.drill.ex" +
- "ec.protoB\017SchemaDefProtosH\001"
+ "\n\017SchemaDef.proto\022\004exec\032\013Types.proto*0\n\t" +
+ "ValueMode\022\020\n\014VALUE_VECTOR\020\000\022\007\n\003RLE\020\001\022\010\n\004" +
+ "DICT\020\002B0\n\033org.apache.drill.exec.protoB\017S" +
+ "chemaDefProtosH\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
- internal_static_exec_NamePart_descriptor =
- getDescriptor().getMessageTypes().get(0);
- internal_static_exec_NamePart_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_exec_NamePart_descriptor,
- new java.lang.String[] { "Type", "Name", });
- internal_static_exec_FieldDef_descriptor =
- getDescriptor().getMessageTypes().get(1);
- internal_static_exec_FieldDef_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_exec_FieldDef_descriptor,
- new java.lang.String[] { "Name", "MajorType", });
return null;
}
};
[03/10] Add support for RepeatedMapVector,
MapVector and RepeatedListVector.
Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/protocol/src/main/java/org/apache/drill/exec/proto/UserBitShared.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/exec/proto/UserBitShared.java b/protocol/src/main/java/org/apache/drill/exec/proto/UserBitShared.java
index 99b4df8..0631f6a 100644
--- a/protocol/src/main/java/org/apache/drill/exec/proto/UserBitShared.java
+++ b/protocol/src/main/java/org/apache/drill/exec/proto/UserBitShared.java
@@ -3128,40 +3128,40 @@ public final class UserBitShared {
public interface RecordBatchDefOrBuilder
extends com.google.protobuf.MessageOrBuilder {
- // repeated .exec.shared.FieldMetadata field = 1;
+ // optional int32 record_count = 1;
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>optional int32 record_count = 1;</code>
*/
- java.util.List<org.apache.drill.exec.proto.UserBitShared.FieldMetadata>
- getFieldList();
+ boolean hasRecordCount();
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>optional int32 record_count = 1;</code>
*/
- org.apache.drill.exec.proto.UserBitShared.FieldMetadata getField(int index);
+ int getRecordCount();
+
+ // repeated .exec.shared.SerializedField field = 2;
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- int getFieldCount();
+ java.util.List<org.apache.drill.exec.proto.UserBitShared.SerializedField>
+ getFieldList();
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- java.util.List<? extends org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder>
- getFieldOrBuilderList();
+ org.apache.drill.exec.proto.UserBitShared.SerializedField getField(int index);
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder getFieldOrBuilder(
- int index);
-
- // optional int32 record_count = 2;
+ int getFieldCount();
/**
- * <code>optional int32 record_count = 2;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- boolean hasRecordCount();
+ java.util.List<? extends org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder>
+ getFieldOrBuilderList();
/**
- * <code>optional int32 record_count = 2;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- int getRecordCount();
+ org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder getFieldOrBuilder(
+ int index);
// optional bool is_selection_vector_2 = 3;
/**
@@ -3224,19 +3224,19 @@ public final class UserBitShared {
}
break;
}
- case 10: {
- if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
- field_ = new java.util.ArrayList<org.apache.drill.exec.proto.UserBitShared.FieldMetadata>();
- mutable_bitField0_ |= 0x00000001;
- }
- field_.add(input.readMessage(org.apache.drill.exec.proto.UserBitShared.FieldMetadata.PARSER, extensionRegistry));
- break;
- }
- case 16: {
+ case 8: {
bitField0_ |= 0x00000001;
recordCount_ = input.readInt32();
break;
}
+ case 18: {
+ if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
+ field_ = new java.util.ArrayList<org.apache.drill.exec.proto.UserBitShared.SerializedField>();
+ mutable_bitField0_ |= 0x00000002;
+ }
+ field_.add(input.readMessage(org.apache.drill.exec.proto.UserBitShared.SerializedField.PARSER, extensionRegistry));
+ break;
+ }
case 24: {
bitField0_ |= 0x00000002;
isSelectionVector2_ = input.readBool();
@@ -3250,7 +3250,7 @@ public final class UserBitShared {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
- if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
field_ = java.util.Collections.unmodifiableList(field_);
}
this.unknownFields = unknownFields.build();
@@ -3285,58 +3285,58 @@ public final class UserBitShared {
}
private int bitField0_;
- // repeated .exec.shared.FieldMetadata field = 1;
- public static final int FIELD_FIELD_NUMBER = 1;
- private java.util.List<org.apache.drill.exec.proto.UserBitShared.FieldMetadata> field_;
+ // optional int32 record_count = 1;
+ public static final int RECORD_COUNT_FIELD_NUMBER = 1;
+ private int recordCount_;
+ /**
+ * <code>optional int32 record_count = 1;</code>
+ */
+ public boolean hasRecordCount() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional int32 record_count = 1;</code>
+ */
+ public int getRecordCount() {
+ return recordCount_;
+ }
+
+ // repeated .exec.shared.SerializedField field = 2;
+ public static final int FIELD_FIELD_NUMBER = 2;
+ private java.util.List<org.apache.drill.exec.proto.UserBitShared.SerializedField> field_;
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public java.util.List<org.apache.drill.exec.proto.UserBitShared.FieldMetadata> getFieldList() {
+ public java.util.List<org.apache.drill.exec.proto.UserBitShared.SerializedField> getFieldList() {
return field_;
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public java.util.List<? extends org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder>
+ public java.util.List<? extends org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder>
getFieldOrBuilderList() {
return field_;
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
public int getFieldCount() {
return field_.size();
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public org.apache.drill.exec.proto.UserBitShared.FieldMetadata getField(int index) {
+ public org.apache.drill.exec.proto.UserBitShared.SerializedField getField(int index) {
return field_.get(index);
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder getFieldOrBuilder(
+ public org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder getFieldOrBuilder(
int index) {
return field_.get(index);
}
- // optional int32 record_count = 2;
- public static final int RECORD_COUNT_FIELD_NUMBER = 2;
- private int recordCount_;
- /**
- * <code>optional int32 record_count = 2;</code>
- */
- public boolean hasRecordCount() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- /**
- * <code>optional int32 record_count = 2;</code>
- */
- public int getRecordCount() {
- return recordCount_;
- }
-
// optional bool is_selection_vector_2 = 3;
public static final int IS_SELECTION_VECTOR_2_FIELD_NUMBER = 3;
private boolean isSelectionVector2_;
@@ -3354,8 +3354,8 @@ public final class UserBitShared {
}
private void initFields() {
- field_ = java.util.Collections.emptyList();
recordCount_ = 0;
+ field_ = java.util.Collections.emptyList();
isSelectionVector2_ = false;
}
private byte memoizedIsInitialized = -1;
@@ -3370,11 +3370,11 @@ public final class UserBitShared {
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
- for (int i = 0; i < field_.size(); i++) {
- output.writeMessage(1, field_.get(i));
- }
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeInt32(2, recordCount_);
+ output.writeInt32(1, recordCount_);
+ }
+ for (int i = 0; i < field_.size(); i++) {
+ output.writeMessage(2, field_.get(i));
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBool(3, isSelectionVector2_);
@@ -3388,13 +3388,13 @@ public final class UserBitShared {
if (size != -1) return size;
size = 0;
- for (int i = 0; i < field_.size(); i++) {
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, field_.get(i));
+ .computeInt32Size(1, recordCount_);
}
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ for (int i = 0; i < field_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
- .computeInt32Size(2, recordCount_);
+ .computeMessageSize(2, field_.get(i));
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
@@ -3517,14 +3517,14 @@ public final class UserBitShared {
public Builder clear() {
super.clear();
+ recordCount_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000001);
if (fieldBuilder_ == null) {
field_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000001);
+ bitField0_ = (bitField0_ & ~0x00000002);
} else {
fieldBuilder_.clear();
}
- recordCount_ = 0;
- bitField0_ = (bitField0_ & ~0x00000002);
isSelectionVector2_ = false;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
@@ -3555,19 +3555,19 @@ public final class UserBitShared {
org.apache.drill.exec.proto.UserBitShared.RecordBatchDef result = new org.apache.drill.exec.proto.UserBitShared.RecordBatchDef(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.recordCount_ = recordCount_;
if (fieldBuilder_ == null) {
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
field_ = java.util.Collections.unmodifiableList(field_);
- bitField0_ = (bitField0_ & ~0x00000001);
+ bitField0_ = (bitField0_ & ~0x00000002);
}
result.field_ = field_;
} else {
result.field_ = fieldBuilder_.build();
}
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000001;
- }
- result.recordCount_ = recordCount_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000002;
}
@@ -3588,11 +3588,14 @@ public final class UserBitShared {
public Builder mergeFrom(org.apache.drill.exec.proto.UserBitShared.RecordBatchDef other) {
if (other == org.apache.drill.exec.proto.UserBitShared.RecordBatchDef.getDefaultInstance()) return this;
+ if (other.hasRecordCount()) {
+ setRecordCount(other.getRecordCount());
+ }
if (fieldBuilder_ == null) {
if (!other.field_.isEmpty()) {
if (field_.isEmpty()) {
field_ = other.field_;
- bitField0_ = (bitField0_ & ~0x00000001);
+ bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureFieldIsMutable();
field_.addAll(other.field_);
@@ -3605,7 +3608,7 @@ public final class UserBitShared {
fieldBuilder_.dispose();
fieldBuilder_ = null;
field_ = other.field_;
- bitField0_ = (bitField0_ & ~0x00000001);
+ bitField0_ = (bitField0_ & ~0x00000002);
fieldBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getFieldFieldBuilder() : null;
@@ -3614,9 +3617,6 @@ public final class UserBitShared {
}
}
}
- if (other.hasRecordCount()) {
- setRecordCount(other.getRecordCount());
- }
if (other.hasIsSelectionVector2()) {
setIsSelectionVector2(other.getIsSelectionVector2());
}
@@ -3647,23 +3647,56 @@ public final class UserBitShared {
}
private int bitField0_;
- // repeated .exec.shared.FieldMetadata field = 1;
- private java.util.List<org.apache.drill.exec.proto.UserBitShared.FieldMetadata> field_ =
+ // optional int32 record_count = 1;
+ private int recordCount_ ;
+ /**
+ * <code>optional int32 record_count = 1;</code>
+ */
+ public boolean hasRecordCount() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional int32 record_count = 1;</code>
+ */
+ public int getRecordCount() {
+ return recordCount_;
+ }
+ /**
+ * <code>optional int32 record_count = 1;</code>
+ */
+ public Builder setRecordCount(int value) {
+ bitField0_ |= 0x00000001;
+ recordCount_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional int32 record_count = 1;</code>
+ */
+ public Builder clearRecordCount() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ recordCount_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // repeated .exec.shared.SerializedField field = 2;
+ private java.util.List<org.apache.drill.exec.proto.UserBitShared.SerializedField> field_ =
java.util.Collections.emptyList();
private void ensureFieldIsMutable() {
- if (!((bitField0_ & 0x00000001) == 0x00000001)) {
- field_ = new java.util.ArrayList<org.apache.drill.exec.proto.UserBitShared.FieldMetadata>(field_);
- bitField0_ |= 0x00000001;
+ if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+ field_ = new java.util.ArrayList<org.apache.drill.exec.proto.UserBitShared.SerializedField>(field_);
+ bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.drill.exec.proto.UserBitShared.FieldMetadata, org.apache.drill.exec.proto.UserBitShared.FieldMetadata.Builder, org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder> fieldBuilder_;
+ org.apache.drill.exec.proto.UserBitShared.SerializedField, org.apache.drill.exec.proto.UserBitShared.SerializedField.Builder, org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder> fieldBuilder_;
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public java.util.List<org.apache.drill.exec.proto.UserBitShared.FieldMetadata> getFieldList() {
+ public java.util.List<org.apache.drill.exec.proto.UserBitShared.SerializedField> getFieldList() {
if (fieldBuilder_ == null) {
return java.util.Collections.unmodifiableList(field_);
} else {
@@ -3671,7 +3704,7 @@ public final class UserBitShared {
}
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
public int getFieldCount() {
if (fieldBuilder_ == null) {
@@ -3681,9 +3714,9 @@ public final class UserBitShared {
}
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public org.apache.drill.exec.proto.UserBitShared.FieldMetadata getField(int index) {
+ public org.apache.drill.exec.proto.UserBitShared.SerializedField getField(int index) {
if (fieldBuilder_ == null) {
return field_.get(index);
} else {
@@ -3691,10 +3724,10 @@ public final class UserBitShared {
}
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
public Builder setField(
- int index, org.apache.drill.exec.proto.UserBitShared.FieldMetadata value) {
+ int index, org.apache.drill.exec.proto.UserBitShared.SerializedField value) {
if (fieldBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -3708,10 +3741,10 @@ public final class UserBitShared {
return this;
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
public Builder setField(
- int index, org.apache.drill.exec.proto.UserBitShared.FieldMetadata.Builder builderForValue) {
+ int index, org.apache.drill.exec.proto.UserBitShared.SerializedField.Builder builderForValue) {
if (fieldBuilder_ == null) {
ensureFieldIsMutable();
field_.set(index, builderForValue.build());
@@ -3722,9 +3755,9 @@ public final class UserBitShared {
return this;
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public Builder addField(org.apache.drill.exec.proto.UserBitShared.FieldMetadata value) {
+ public Builder addField(org.apache.drill.exec.proto.UserBitShared.SerializedField value) {
if (fieldBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -3738,10 +3771,10 @@ public final class UserBitShared {
return this;
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
public Builder addField(
- int index, org.apache.drill.exec.proto.UserBitShared.FieldMetadata value) {
+ int index, org.apache.drill.exec.proto.UserBitShared.SerializedField value) {
if (fieldBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
@@ -3755,10 +3788,10 @@ public final class UserBitShared {
return this;
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
public Builder addField(
- org.apache.drill.exec.proto.UserBitShared.FieldMetadata.Builder builderForValue) {
+ org.apache.drill.exec.proto.UserBitShared.SerializedField.Builder builderForValue) {
if (fieldBuilder_ == null) {
ensureFieldIsMutable();
field_.add(builderForValue.build());
@@ -3769,10 +3802,10 @@ public final class UserBitShared {
return this;
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
public Builder addField(
- int index, org.apache.drill.exec.proto.UserBitShared.FieldMetadata.Builder builderForValue) {
+ int index, org.apache.drill.exec.proto.UserBitShared.SerializedField.Builder builderForValue) {
if (fieldBuilder_ == null) {
ensureFieldIsMutable();
field_.add(index, builderForValue.build());
@@ -3783,10 +3816,10 @@ public final class UserBitShared {
return this;
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
public Builder addAllField(
- java.lang.Iterable<? extends org.apache.drill.exec.proto.UserBitShared.FieldMetadata> values) {
+ java.lang.Iterable<? extends org.apache.drill.exec.proto.UserBitShared.SerializedField> values) {
if (fieldBuilder_ == null) {
ensureFieldIsMutable();
super.addAll(values, field_);
@@ -3797,12 +3830,12 @@ public final class UserBitShared {
return this;
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
public Builder clearField() {
if (fieldBuilder_ == null) {
field_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000001);
+ bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
fieldBuilder_.clear();
@@ -3810,7 +3843,7 @@ public final class UserBitShared {
return this;
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
public Builder removeField(int index) {
if (fieldBuilder_ == null) {
@@ -3823,16 +3856,16 @@ public final class UserBitShared {
return this;
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public org.apache.drill.exec.proto.UserBitShared.FieldMetadata.Builder getFieldBuilder(
+ public org.apache.drill.exec.proto.UserBitShared.SerializedField.Builder getFieldBuilder(
int index) {
return getFieldFieldBuilder().getBuilder(index);
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder getFieldOrBuilder(
+ public org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder getFieldOrBuilder(
int index) {
if (fieldBuilder_ == null) {
return field_.get(index); } else {
@@ -3840,9 +3873,9 @@ public final class UserBitShared {
}
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public java.util.List<? extends org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder>
+ public java.util.List<? extends org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder>
getFieldOrBuilderList() {
if (fieldBuilder_ != null) {
return fieldBuilder_.getMessageOrBuilderList();
@@ -3851,35 +3884,35 @@ public final class UserBitShared {
}
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public org.apache.drill.exec.proto.UserBitShared.FieldMetadata.Builder addFieldBuilder() {
+ public org.apache.drill.exec.proto.UserBitShared.SerializedField.Builder addFieldBuilder() {
return getFieldFieldBuilder().addBuilder(
- org.apache.drill.exec.proto.UserBitShared.FieldMetadata.getDefaultInstance());
+ org.apache.drill.exec.proto.UserBitShared.SerializedField.getDefaultInstance());
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public org.apache.drill.exec.proto.UserBitShared.FieldMetadata.Builder addFieldBuilder(
+ public org.apache.drill.exec.proto.UserBitShared.SerializedField.Builder addFieldBuilder(
int index) {
return getFieldFieldBuilder().addBuilder(
- index, org.apache.drill.exec.proto.UserBitShared.FieldMetadata.getDefaultInstance());
+ index, org.apache.drill.exec.proto.UserBitShared.SerializedField.getDefaultInstance());
}
/**
- * <code>repeated .exec.shared.FieldMetadata field = 1;</code>
+ * <code>repeated .exec.shared.SerializedField field = 2;</code>
*/
- public java.util.List<org.apache.drill.exec.proto.UserBitShared.FieldMetadata.Builder>
+ public java.util.List<org.apache.drill.exec.proto.UserBitShared.SerializedField.Builder>
getFieldBuilderList() {
return getFieldFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.drill.exec.proto.UserBitShared.FieldMetadata, org.apache.drill.exec.proto.UserBitShared.FieldMetadata.Builder, org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder>
+ org.apache.drill.exec.proto.UserBitShared.SerializedField, org.apache.drill.exec.proto.UserBitShared.SerializedField.Builder, org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder>
getFieldFieldBuilder() {
if (fieldBuilder_ == null) {
fieldBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
- org.apache.drill.exec.proto.UserBitShared.FieldMetadata, org.apache.drill.exec.proto.UserBitShared.FieldMetadata.Builder, org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder>(
+ org.apache.drill.exec.proto.UserBitShared.SerializedField, org.apache.drill.exec.proto.UserBitShared.SerializedField.Builder, org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder>(
field_,
- ((bitField0_ & 0x00000001) == 0x00000001),
+ ((bitField0_ & 0x00000002) == 0x00000002),
getParentForChildren(),
isClean());
field_ = null;
@@ -3887,39 +3920,6 @@ public final class UserBitShared {
return fieldBuilder_;
}
- // optional int32 record_count = 2;
- private int recordCount_ ;
- /**
- * <code>optional int32 record_count = 2;</code>
- */
- public boolean hasRecordCount() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- /**
- * <code>optional int32 record_count = 2;</code>
- */
- public int getRecordCount() {
- return recordCount_;
- }
- /**
- * <code>optional int32 record_count = 2;</code>
- */
- public Builder setRecordCount(int value) {
- bitField0_ |= 0x00000002;
- recordCount_ = value;
- onChanged();
- return this;
- }
- /**
- * <code>optional int32 record_count = 2;</code>
- */
- public Builder clearRecordCount() {
- bitField0_ = (bitField0_ & ~0x00000002);
- recordCount_ = 0;
- onChanged();
- return this;
- }
-
// optional bool is_selection_vector_2 = 3;
private boolean isSelectionVector2_ ;
/**
@@ -3964,115 +3964,67 @@ public final class UserBitShared {
// @@protoc_insertion_point(class_scope:exec.shared.RecordBatchDef)
}
- public interface FieldMetadataOrBuilder
+ public interface NamePartOrBuilder
extends com.google.protobuf.MessageOrBuilder {
- // optional .exec.FieldDef def = 1;
- /**
- * <code>optional .exec.FieldDef def = 1;</code>
- */
- boolean hasDef();
- /**
- * <code>optional .exec.FieldDef def = 1;</code>
- */
- org.apache.drill.exec.proto.SchemaDefProtos.FieldDef getDef();
- /**
- * <code>optional .exec.FieldDef def = 1;</code>
- */
- org.apache.drill.exec.proto.SchemaDefProtos.FieldDefOrBuilder getDefOrBuilder();
-
- // optional int32 value_count = 2;
- /**
- * <code>optional int32 value_count = 2;</code>
- */
- boolean hasValueCount();
- /**
- * <code>optional int32 value_count = 2;</code>
- */
- int getValueCount();
-
- // optional int32 var_byte_length = 3;
+ // optional .exec.shared.NamePart.Type type = 1;
/**
- * <code>optional int32 var_byte_length = 3;</code>
+ * <code>optional .exec.shared.NamePart.Type type = 1;</code>
*/
- boolean hasVarByteLength();
+ boolean hasType();
/**
- * <code>optional int32 var_byte_length = 3;</code>
+ * <code>optional .exec.shared.NamePart.Type type = 1;</code>
*/
- int getVarByteLength();
+ org.apache.drill.exec.proto.UserBitShared.NamePart.Type getType();
- // optional int32 group_count = 4;
- /**
- * <code>optional int32 group_count = 4;</code>
- *
- * <pre>
- * number of groups. (number of repeated records)
- * </pre>
- */
- boolean hasGroupCount();
+ // optional string name = 2;
/**
- * <code>optional int32 group_count = 4;</code>
- *
- * <pre>
- * number of groups. (number of repeated records)
- * </pre>
+ * <code>optional string name = 2;</code>
*/
- int getGroupCount();
-
- // optional int32 buffer_length = 5;
+ boolean hasName();
/**
- * <code>optional int32 buffer_length = 5;</code>
+ * <code>optional string name = 2;</code>
*/
- boolean hasBufferLength();
+ java.lang.String getName();
/**
- * <code>optional int32 buffer_length = 5;</code>
+ * <code>optional string name = 2;</code>
*/
- int getBufferLength();
+ com.google.protobuf.ByteString
+ getNameBytes();
- // repeated .exec.shared.FieldMetadata child = 6;
- /**
- * <code>repeated .exec.shared.FieldMetadata child = 6;</code>
- */
- java.util.List<org.apache.drill.exec.proto.UserBitShared.FieldMetadata>
- getChildList();
- /**
- * <code>repeated .exec.shared.FieldMetadata child = 6;</code>
- */
- org.apache.drill.exec.proto.UserBitShared.FieldMetadata getChild(int index);
+ // optional .exec.shared.NamePart child = 3;
/**
- * <code>repeated .exec.shared.FieldMetadata child = 6;</code>
+ * <code>optional .exec.shared.NamePart child = 3;</code>
*/
- int getChildCount();
+ boolean hasChild();
/**
- * <code>repeated .exec.shared.FieldMetadata child = 6;</code>
+ * <code>optional .exec.shared.NamePart child = 3;</code>
*/
- java.util.List<? extends org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder>
- getChildOrBuilderList();
+ org.apache.drill.exec.proto.UserBitShared.NamePart getChild();
/**
- * <code>repeated .exec.shared.FieldMetadata child = 6;</code>
+ * <code>optional .exec.shared.NamePart child = 3;</code>
*/
- org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder getChildOrBuilder(
- int index);
+ org.apache.drill.exec.proto.UserBitShared.NamePartOrBuilder getChildOrBuilder();
}
/**
- * Protobuf type {@code exec.shared.FieldMetadata}
+ * Protobuf type {@code exec.shared.NamePart}
*/
- public static final class FieldMetadata extends
+ public static final class NamePart extends
com.google.protobuf.GeneratedMessage
- implements FieldMetadataOrBuilder {
- // Use FieldMetadata.newBuilder() to construct.
- private FieldMetadata(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ implements NamePartOrBuilder {
+ // Use NamePart.newBuilder() to construct.
+ private NamePart(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
- private FieldMetadata(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+ private NamePart(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
- private static final FieldMetadata defaultInstance;
- public static FieldMetadata getDefaultInstance() {
+ private static final NamePart defaultInstance;
+ public static NamePart getDefaultInstance() {
return defaultInstance;
}
- public FieldMetadata getDefaultInstanceForType() {
+ public NamePart getDefaultInstanceForType() {
return defaultInstance;
}
@@ -4082,7 +4034,7 @@ public final class UserBitShared {
getUnknownFields() {
return this.unknownFields;
}
- private FieldMetadata(
+ private NamePart(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
@@ -4105,45 +4057,33 @@ public final class UserBitShared {
}
break;
}
- case 10: {
- org.apache.drill.exec.proto.SchemaDefProtos.FieldDef.Builder subBuilder = null;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- subBuilder = def_.toBuilder();
- }
- def_ = input.readMessage(org.apache.drill.exec.proto.SchemaDefProtos.FieldDef.PARSER, extensionRegistry);
- if (subBuilder != null) {
- subBuilder.mergeFrom(def_);
- def_ = subBuilder.buildPartial();
+ case 8: {
+ int rawValue = input.readEnum();
+ org.apache.drill.exec.proto.UserBitShared.NamePart.Type value = org.apache.drill.exec.proto.UserBitShared.NamePart.Type.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(1, rawValue);
+ } else {
+ bitField0_ |= 0x00000001;
+ type_ = value;
}
- bitField0_ |= 0x00000001;
break;
}
- case 16: {
+ case 18: {
bitField0_ |= 0x00000002;
- valueCount_ = input.readInt32();
- break;
- }
- case 24: {
- bitField0_ |= 0x00000004;
- varByteLength_ = input.readInt32();
- break;
- }
- case 32: {
- bitField0_ |= 0x00000008;
- groupCount_ = input.readInt32();
- break;
- }
- case 40: {
- bitField0_ |= 0x00000010;
- bufferLength_ = input.readInt32();
+ name_ = input.readBytes();
break;
}
- case 50: {
- if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
- child_ = new java.util.ArrayList<org.apache.drill.exec.proto.UserBitShared.FieldMetadata>();
- mutable_bitField0_ |= 0x00000020;
+ case 26: {
+ org.apache.drill.exec.proto.UserBitShared.NamePart.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ subBuilder = child_.toBuilder();
+ }
+ child_ = input.readMessage(org.apache.drill.exec.proto.UserBitShared.NamePart.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(child_);
+ child_ = subBuilder.buildPartial();
}
- child_.add(input.readMessage(org.apache.drill.exec.proto.UserBitShared.FieldMetadata.PARSER, extensionRegistry));
+ bitField0_ |= 0x00000004;
break;
}
}
@@ -4154,178 +4094,1187 @@ public final class UserBitShared {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
- if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
- child_ = java.util.Collections.unmodifiableList(child_);
- }
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
- return org.apache.drill.exec.proto.UserBitShared.internal_static_exec_shared_FieldMetadata_descriptor;
+ return org.apache.drill.exec.proto.UserBitShared.internal_static_exec_shared_NamePart_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
- return org.apache.drill.exec.proto.UserBitShared.internal_static_exec_shared_FieldMetadata_fieldAccessorTable
+ return org.apache.drill.exec.proto.UserBitShared.internal_static_exec_shared_NamePart_fieldAccessorTable
.ensureFieldAccessorsInitialized(
- org.apache.drill.exec.proto.UserBitShared.FieldMetadata.class, org.apache.drill.exec.proto.UserBitShared.FieldMetadata.Builder.class);
+ org.apache.drill.exec.proto.UserBitShared.NamePart.class, org.apache.drill.exec.proto.UserBitShared.NamePart.Builder.class);
}
- public static com.google.protobuf.Parser<FieldMetadata> PARSER =
- new com.google.protobuf.AbstractParser<FieldMetadata>() {
- public FieldMetadata parsePartialFrom(
+ public static com.google.protobuf.Parser<NamePart> PARSER =
+ new com.google.protobuf.AbstractParser<NamePart>() {
+ public NamePart parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
- return new FieldMetadata(input, extensionRegistry);
+ return new NamePart(input, extensionRegistry);
}
};
@java.lang.Override
- public com.google.protobuf.Parser<FieldMetadata> getParserForType() {
+ public com.google.protobuf.Parser<NamePart> getParserForType() {
return PARSER;
}
- private int bitField0_;
- // optional .exec.FieldDef def = 1;
- public static final int DEF_FIELD_NUMBER = 1;
- private org.apache.drill.exec.proto.SchemaDefProtos.FieldDef def_;
/**
- * <code>optional .exec.FieldDef def = 1;</code>
+ * Protobuf enum {@code exec.shared.NamePart.Type}
*/
- public boolean hasDef() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
+ public enum Type
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * <code>NAME = 0;</code>
+ */
+ NAME(0, 0),
+ /**
+ * <code>ARRAY = 1;</code>
+ */
+ ARRAY(1, 1),
+ ;
+
+ /**
+ * <code>NAME = 0;</code>
+ */
+ public static final int NAME_VALUE = 0;
+ /**
+ * <code>ARRAY = 1;</code>
+ */
+ public static final int ARRAY_VALUE = 1;
+
+
+ public final int getNumber() { return value; }
+
+ public static Type valueOf(int value) {
+ switch (value) {
+ case 0: return NAME;
+ case 1: return ARRAY;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<Type>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<Type>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<Type>() {
+ public Type findValueByNumber(int number) {
+ return Type.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.drill.exec.proto.UserBitShared.NamePart.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final Type[] VALUES = values();
+
+ public static Type valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private Type(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:exec.shared.NamePart.Type)
}
+
+ private int bitField0_;
+ // optional .exec.shared.NamePart.Type type = 1;
+ public static final int TYPE_FIELD_NUMBER = 1;
+ private org.apache.drill.exec.proto.UserBitShared.NamePart.Type type_;
/**
- * <code>optional .exec.FieldDef def = 1;</code>
+ * <code>optional .exec.shared.NamePart.Type type = 1;</code>
*/
- public org.apache.drill.exec.proto.SchemaDefProtos.FieldDef getDef() {
- return def_;
+ public boolean hasType() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
- * <code>optional .exec.FieldDef def = 1;</code>
+ * <code>optional .exec.shared.NamePart.Type type = 1;</code>
*/
- public org.apache.drill.exec.proto.SchemaDefProtos.FieldDefOrBuilder getDefOrBuilder() {
- return def_;
+ public org.apache.drill.exec.proto.UserBitShared.NamePart.Type getType() {
+ return type_;
}
- // optional int32 value_count = 2;
- public static final int VALUE_COUNT_FIELD_NUMBER = 2;
- private int valueCount_;
+ // optional string name = 2;
+ public static final int NAME_FIELD_NUMBER = 2;
+ private java.lang.Object name_;
/**
- * <code>optional int32 value_count = 2;</code>
+ * <code>optional string name = 2;</code>
*/
- public boolean hasValueCount() {
+ public boolean hasName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
- * <code>optional int32 value_count = 2;</code>
+ * <code>optional string name = 2;</code>
*/
- public int getValueCount() {
- return valueCount_;
+ public java.lang.String getName() {
+ java.lang.Object ref = name_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ name_ = s;
+ }
+ return s;
+ }
}
-
- // optional int32 var_byte_length = 3;
- public static final int VAR_BYTE_LENGTH_FIELD_NUMBER = 3;
- private int varByteLength_;
/**
- * <code>optional int32 var_byte_length = 3;</code>
+ * <code>optional string name = 2;</code>
*/
- public boolean hasVarByteLength() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- /**
- * <code>optional int32 var_byte_length = 3;</code>
+ public com.google.protobuf.ByteString
+ getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional .exec.shared.NamePart child = 3;
+ public static final int CHILD_FIELD_NUMBER = 3;
+ private org.apache.drill.exec.proto.UserBitShared.NamePart child_;
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ public boolean hasChild() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ public org.apache.drill.exec.proto.UserBitShared.NamePart getChild() {
+ return child_;
+ }
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ public org.apache.drill.exec.proto.UserBitShared.NamePartOrBuilder getChildOrBuilder() {
+ return child_;
+ }
+
+ private void initFields() {
+ type_ = org.apache.drill.exec.proto.UserBitShared.NamePart.Type.NAME;
+ name_ = "";
+ child_ = org.apache.drill.exec.proto.UserBitShared.NamePart.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeEnum(1, type_.getNumber());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getNameBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeMessage(3, child_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(1, type_.getNumber());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getNameBytes());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, child_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ public static org.apache.drill.exec.proto.UserBitShared.NamePart parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.drill.exec.proto.UserBitShared.NamePart parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserBitShared.NamePart parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.drill.exec.proto.UserBitShared.NamePart parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserBitShared.NamePart parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserBitShared.NamePart parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserBitShared.NamePart parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserBitShared.NamePart parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.drill.exec.proto.UserBitShared.NamePart parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.drill.exec.proto.UserBitShared.NamePart parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.drill.exec.proto.UserBitShared.NamePart prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code exec.shared.NamePart}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.drill.exec.proto.UserBitShared.NamePartOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.drill.exec.proto.UserBitShared.internal_static_exec_shared_NamePart_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.drill.exec.proto.UserBitShared.internal_static_exec_shared_NamePart_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.drill.exec.proto.UserBitShared.NamePart.class, org.apache.drill.exec.proto.UserBitShared.NamePart.Builder.class);
+ }
+
+ // Construct using org.apache.drill.exec.proto.UserBitShared.NamePart.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getChildFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ type_ = org.apache.drill.exec.proto.UserBitShared.NamePart.Type.NAME;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ name_ = "";
+ bitField0_ = (bitField0_ & ~0x00000002);
+ if (childBuilder_ == null) {
+ child_ = org.apache.drill.exec.proto.UserBitShared.NamePart.getDefaultInstance();
+ } else {
+ childBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.drill.exec.proto.UserBitShared.internal_static_exec_shared_NamePart_descriptor;
+ }
+
+ public org.apache.drill.exec.proto.UserBitShared.NamePart getDefaultInstanceForType() {
+ return org.apache.drill.exec.proto.UserBitShared.NamePart.getDefaultInstance();
+ }
+
+ public org.apache.drill.exec.proto.UserBitShared.NamePart build() {
+ org.apache.drill.exec.proto.UserBitShared.NamePart result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.drill.exec.proto.UserBitShared.NamePart buildPartial() {
+ org.apache.drill.exec.proto.UserBitShared.NamePart result = new org.apache.drill.exec.proto.UserBitShared.NamePart(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.type_ = type_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.name_ = name_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ if (childBuilder_ == null) {
+ result.child_ = child_;
+ } else {
+ result.child_ = childBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.drill.exec.proto.UserBitShared.NamePart) {
+ return mergeFrom((org.apache.drill.exec.proto.UserBitShared.NamePart)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.drill.exec.proto.UserBitShared.NamePart other) {
+ if (other == org.apache.drill.exec.proto.UserBitShared.NamePart.getDefaultInstance()) return this;
+ if (other.hasType()) {
+ setType(other.getType());
+ }
+ if (other.hasName()) {
+ bitField0_ |= 0x00000002;
+ name_ = other.name_;
+ onChanged();
+ }
+ if (other.hasChild()) {
+ mergeChild(other.getChild());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.drill.exec.proto.UserBitShared.NamePart parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.drill.exec.proto.UserBitShared.NamePart) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional .exec.shared.NamePart.Type type = 1;
+ private org.apache.drill.exec.proto.UserBitShared.NamePart.Type type_ = org.apache.drill.exec.proto.UserBitShared.NamePart.Type.NAME;
+ /**
+ * <code>optional .exec.shared.NamePart.Type type = 1;</code>
+ */
+ public boolean hasType() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>optional .exec.shared.NamePart.Type type = 1;</code>
+ */
+ public org.apache.drill.exec.proto.UserBitShared.NamePart.Type getType() {
+ return type_;
+ }
+ /**
+ * <code>optional .exec.shared.NamePart.Type type = 1;</code>
+ */
+ public Builder setType(org.apache.drill.exec.proto.UserBitShared.NamePart.Type value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ type_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional .exec.shared.NamePart.Type type = 1;</code>
+ */
+ public Builder clearType() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ type_ = org.apache.drill.exec.proto.UserBitShared.NamePart.Type.NAME;
+ onChanged();
+ return this;
+ }
+
+ // optional string name = 2;
+ private java.lang.Object name_ = "";
+ /**
+ * <code>optional string name = 2;</code>
+ */
+ public boolean hasName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional string name = 2;</code>
+ */
+ public java.lang.String getName() {
+ java.lang.Object ref = name_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ name_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * <code>optional string name = 2;</code>
+ */
+ public com.google.protobuf.ByteString
+ getNameBytes() {
+ java.lang.Object ref = name_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ name_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * <code>optional string name = 2;</code>
+ */
+ public Builder setName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ name_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string name = 2;</code>
+ */
+ public Builder clearName() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ name_ = getDefaultInstance().getName();
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional string name = 2;</code>
+ */
+ public Builder setNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ name_ = value;
+ onChanged();
+ return this;
+ }
+
+ // optional .exec.shared.NamePart child = 3;
+ private org.apache.drill.exec.proto.UserBitShared.NamePart child_ = org.apache.drill.exec.proto.UserBitShared.NamePart.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.drill.exec.proto.UserBitShared.NamePart, org.apache.drill.exec.proto.UserBitShared.NamePart.Builder, org.apache.drill.exec.proto.UserBitShared.NamePartOrBuilder> childBuilder_;
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ public boolean hasChild() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ public org.apache.drill.exec.proto.UserBitShared.NamePart getChild() {
+ if (childBuilder_ == null) {
+ return child_;
+ } else {
+ return childBuilder_.getMessage();
+ }
+ }
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ public Builder setChild(org.apache.drill.exec.proto.UserBitShared.NamePart value) {
+ if (childBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ child_ = value;
+ onChanged();
+ } else {
+ childBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ public Builder setChild(
+ org.apache.drill.exec.proto.UserBitShared.NamePart.Builder builderForValue) {
+ if (childBuilder_ == null) {
+ child_ = builderForValue.build();
+ onChanged();
+ } else {
+ childBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ public Builder mergeChild(org.apache.drill.exec.proto.UserBitShared.NamePart value) {
+ if (childBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004) &&
+ child_ != org.apache.drill.exec.proto.UserBitShared.NamePart.getDefaultInstance()) {
+ child_ =
+ org.apache.drill.exec.proto.UserBitShared.NamePart.newBuilder(child_).mergeFrom(value).buildPartial();
+ } else {
+ child_ = value;
+ }
+ onChanged();
+ } else {
+ childBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ public Builder clearChild() {
+ if (childBuilder_ == null) {
+ child_ = org.apache.drill.exec.proto.UserBitShared.NamePart.getDefaultInstance();
+ onChanged();
+ } else {
+ childBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ public org.apache.drill.exec.proto.UserBitShared.NamePart.Builder getChildBuilder() {
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return getChildFieldBuilder().getBuilder();
+ }
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ public org.apache.drill.exec.proto.UserBitShared.NamePartOrBuilder getChildOrBuilder() {
+ if (childBuilder_ != null) {
+ return childBuilder_.getMessageOrBuilder();
+ } else {
+ return child_;
+ }
+ }
+ /**
+ * <code>optional .exec.shared.NamePart child = 3;</code>
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.drill.exec.proto.UserBitShared.NamePart, org.apache.drill.exec.proto.UserBitShared.NamePart.Builder, org.apache.drill.exec.proto.UserBitShared.NamePartOrBuilder>
+ getChildFieldBuilder() {
+ if (childBuilder_ == null) {
+ childBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.drill.exec.proto.UserBitShared.NamePart, org.apache.drill.exec.proto.UserBitShared.NamePart.Builder, org.apache.drill.exec.proto.UserBitShared.NamePartOrBuilder>(
+ child_,
+ getParentForChildren(),
+ isClean());
+ child_ = null;
+ }
+ return childBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:exec.shared.NamePart)
+ }
+
+ static {
+ defaultInstance = new NamePart(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:exec.shared.NamePart)
+ }
+
+ public interface SerializedFieldOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional .common.MajorType major_type = 1;
+ /**
+ * <code>optional .common.MajorType major_type = 1;</code>
+ *
+ * <pre>
+ * the type associated with this field.
+ * </pre>
*/
- public int getVarByteLength() {
- return varByteLength_;
+ boolean hasMajorType();
+ /**
+ * <code>optional .common.MajorType major_type = 1;</code>
+ *
+ * <pre>
+ * the type associated with this field.
+ * </pre>
+ */
+ org.apache.drill.common.types.TypeProtos.MajorType getMajorType();
+ /**
+ * <code>optional .common.MajorType major_type = 1;</code>
+ *
+ * <pre>
+ * the type associated with this field.
+ * </pre>
+ */
+ org.apache.drill.common.types.TypeProtos.MajorTypeOrBuilder getMajorTypeOrBuilder();
+
+ // optional .exec.shared.NamePart name_part = 2;
+ /**
+ * <code>optional .exec.shared.NamePart name_part = 2;</code>
+ */
+ boolean hasNamePart();
+ /**
+ * <code>optional .exec.shared.NamePart name_part = 2;</code>
+ */
+ org.apache.drill.exec.proto.UserBitShared.NamePart getNamePart();
+ /**
+ * <code>optional .exec.shared.NamePart name_part = 2;</code>
+ */
+ org.apache.drill.exec.proto.UserBitShared.NamePartOrBuilder getNamePartOrBuilder();
+
+ // repeated .exec.shared.SerializedField child = 3;
+ /**
+ * <code>repeated .exec.shared.SerializedField child = 3;</code>
+ *
+ * <pre>
+ * only in the cases of type == MAP or REPEAT_MAP or REPEATED_LIST
+ * </pre>
+ */
+ java.util.List<org.apache.drill.exec.proto.UserBitShared.SerializedField>
+ getChildList();
+ /**
+ * <code>repeated .exec.shared.SerializedField child = 3;</code>
+ *
+ * <pre>
+ * only in the cases of type == MAP or REPEAT_MAP or REPEATED_LIST
+ * </pre>
+ */
+ org.apache.drill.exec.proto.UserBitShared.SerializedField getChild(int index);
+ /**
+ * <code>repeated .exec.shared.SerializedField child = 3;</code>
+ *
+ * <pre>
+ * only in the cases of type == MAP or REPEAT_MAP or REPEATED_LIST
+ * </pre>
+ */
+ int getChildCount();
+ /**
+ * <code>repeated .exec.shared.SerializedField child = 3;</code>
+ *
+ * <pre>
+ * only in the cases of type == MAP or REPEAT_MAP or REPEATED_LIST
+ * </pre>
+ */
+ java.util.List<? extends org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder>
+ getChildOrBuilderList();
+ /**
+ * <code>repeated .exec.shared.SerializedField child = 3;</code>
+ *
+ * <pre>
+ * only in the cases of type == MAP or REPEAT_MAP or REPEATED_LIST
+ * </pre>
+ */
+ org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder getChildOrBuilder(
+ int index);
+
+ // optional int32 value_count = 4;
+ /**
+ * <code>optional int32 value_count = 4;</code>
+ */
+ boolean hasValueCount();
+ /**
+ * <code>optional int32 value_count = 4;</code>
+ */
+ int getValueCount();
+
+ // optional int32 var_byte_length = 5;
+ /**
+ * <code>optional int32 var_byte_length = 5;</code>
+ */
+ boolean hasVarByteLength();
+ /**
+ * <code>optional int32 var_byte_length = 5;</code>
+ */
+ int getVarByteLength();
+
+ // optional int32 group_count = 6;
+ /**
+ * <code>optional int32 group_count = 6;</code>
+ *
+ * <pre>
+ * number of groups. (number of repeated records)
+ * </pre>
+ */
+ boolean hasGroupCount();
+ /**
+ * <code>optional int32 group_count = 6;</code>
+ *
+ * <pre>
+ * number of groups. (number of repeated records)
+ * </pre>
+ */
+ int getGroupCount();
+
+ // optional int32 buffer_length = 7;
+ /**
+ * <code>optional int32 buffer_length = 7;</code>
+ */
+ boolean hasBufferLength();
+ /**
+ * <code>optional int32 buffer_length = 7;</code>
+ */
+ int getBufferLength();
+ }
+ /**
+ * Protobuf type {@code exec.shared.SerializedField}
+ */
+ public static final class SerializedField extends
+ com.google.protobuf.GeneratedMessage
+ implements SerializedFieldOrBuilder {
+ // Use SerializedField.newBuilder() to construct.
+ private SerializedField(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SerializedField(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SerializedField defaultInstance;
+ public static SerializedField getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SerializedField getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SerializedField(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.drill.common.types.TypeProtos.MajorType.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = majorType_.toBuilder();
+ }
+ majorType_ = input.readMessage(org.apache.drill.common.types.TypeProtos.MajorType.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(majorType_);
+ majorType_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ case 18: {
+ org.apache.drill.exec.proto.UserBitShared.NamePart.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ subBuilder = namePart_.toBuilder();
+ }
+ namePart_ = input.readMessage(org.apache.drill.exec.proto.UserBitShared.NamePart.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(namePart_);
+ namePart_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000002;
+ break;
+ }
+ case 26: {
+ if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ child_ = new java.util.ArrayList<org.apache.drill.exec.proto.UserBitShared.SerializedField>();
+ mutable_bitField0_ |= 0x00000004;
+ }
+ child_.add(input.readMessage(org.apache.drill.exec.proto.UserBitShared.SerializedField.PARSER, extensionRegistry));
+ break;
+ }
+ case 32: {
+ bitField0_ |= 0x00000004;
+ valueCount_ = input.readInt32();
+ break;
+ }
+ case 40: {
+ bitField0_ |= 0x00000008;
+ varByteLength_ = input.readInt32();
+ break;
+ }
+ case 48: {
+ bitField0_ |= 0x00000010;
+ groupCount_ = input.readInt32();
+ break;
+ }
+ case 56: {
+ bitField0_ |= 0x00000020;
+ bufferLength_ = input.readInt32();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ child_ = java.util.Collections.unmodifiableList(child_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.drill.exec.proto.UserBitShared.internal_static_exec_shared_SerializedField_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.drill.exec.proto.UserBitShared.internal_static_exec_shared_SerializedField_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.drill.exec.proto.UserBitShared.SerializedField.class, org.apache.drill.exec.proto.UserBitShared.SerializedField.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<SerializedField> PARSER =
+ new com.google.protobuf.AbstractParser<SerializedField>() {
+ public SerializedField parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SerializedField(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SerializedField> getParserForType() {
+ return PARSER;
}
- // optional int32 group_count = 4;
- public static final int GROUP_COUNT_FIELD_NUMBER = 4;
- private int groupCount_;
+ private int bitField0_;
+ // optional .common.MajorType major_type = 1;
+ public static final int MAJOR_TYPE_FIELD_NUMBER = 1;
+ private org.apache.drill.common.types.TypeProtos.MajorType majorType_;
/**
- * <code>optional int32 group_count = 4;</code>
+ * <code>optional .common.MajorType major_type = 1;</code>
*
* <pre>
- * number of groups. (number of repeated records)
+ * the type associated with this field.
* </pre>
*/
- public boolean hasGroupCount() {
- return ((bitField0_ & 0x00000008) == 0x00000008);
+ public boolean hasMajorType() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
- * <code>optional int32 group_count = 4;</code>
+ * <code>optional .common.MajorType major_type = 1;</code>
*
* <pre>
- * number of groups. (number of repeated records)
+ * the type associated with this field.
* </pre>
*/
- public int getGroupCount() {
- return groupCount_;
+ public org.apache.drill.common.types.TypeProtos.MajorType getMajorType() {
+ return majorType_;
+ }
+ /**
+ * <code>optional .common.MajorType major_type = 1;</code>
+ *
+ * <pre>
+ * the type associated with this field.
+ * </pre>
+ */
+ public org.apache.drill.common.types.TypeProtos.MajorTypeOrBuilder getMajorTypeOrBuilder() {
+ return majorType_;
}
- // optional int32 buffer_length = 5;
- public static final int BUFFER_LENGTH_FIELD_NUMBER = 5;
- private int bufferLength_;
+ // optional .exec.shared.NamePart name_part = 2;
+ public static final int NAME_PART_FIELD_NUMBER = 2;
+ private org.apache.drill.exec.proto.UserBitShared.NamePart namePart_;
/**
- * <code>optional int32 buffer_length = 5;</code>
+ * <code>optional .exec.shared.NamePart name_part = 2;</code>
*/
- public boolean hasBufferLength() {
- return ((bitField0_ & 0x00000010) == 0x00000010);
+ public boolean hasNamePart() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
- * <code>optional int32 buffer_length = 5;</code>
+ * <code>optional .exec.shared.NamePart name_part = 2;</code>
*/
- public int getBufferLength() {
- return bufferLength_;
+ public org.apache.drill.exec.proto.UserBitShared.NamePart getNamePart() {
+ return namePart_;
+ }
+ /**
+ * <code>optional .exec.shared.NamePart name_part = 2;</code>
+ */
+ public org.apache.drill.exec.proto.UserBitShared.NamePartOrBuilder getNamePartOrBuilder() {
+ return namePart_;
}
- // repeated .exec.shared.FieldMetadata child = 6;
- public static final int CHILD_FIELD_NUMBER = 6;
- private java.util.List<org.apache.drill.exec.proto.UserBitShared.FieldMetadata> child_;
+ // repeated .exec.shared.SerializedField child = 3;
+ public static final int CHILD_FIELD_NUMBER = 3;
+ private java.util.List<org.apache.drill.exec.proto.UserBitShared.SerializedField> child_;
/**
- * <code>repeated .exec.shared.FieldMetadata child = 6;</code>
+ * <code>repeated .exec.shared.SerializedField child = 3;</code>
+ *
+ * <pre>
+ * only in the cases of type == MAP or REPEAT_MAP or REPEATED_LIST
+ * </pre>
*/
- public java.util.List<org.apache.drill.exec.proto.UserBitShared.FieldMetadata> getChildList() {
+ public java.util.List<org.apache.drill.exec.proto.UserBitShared.SerializedField> getChildList() {
return child_;
}
/**
- * <code>repeated .exec.shared.FieldMetadata child = 6;</code>
+ * <code>repeated .exec.shared.SerializedField child = 3;</code>
+ *
+ * <pre>
+ * only in the cases of type == MAP or REPEAT_MAP or REPEATED_LIST
+ * </pre>
*/
- public java.util.List<? extends org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder>
+ public java.util.List<? extends org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder>
getChildOrBuilderList() {
return child_;
}
/**
- * <code>repeated .exec.shared.FieldMetadata child = 6;</code>
+ * <code>repeated .exec.shared.SerializedField child = 3;</code>
+ *
+ * <pre>
+ * only in the cases of type == MAP or REPEAT_MAP or REPEATED_LIST
+ * </pre>
*/
public int getChildCount() {
return child_.size();
}
/**
- * <code>repeated .exec.shared.FieldMetadata child = 6;</code>
+ * <code>repeated .exec.shared.SerializedField child = 3;</code>
+ *
+ * <pre>
+ * only in the cases of type == MAP or REPEAT_MAP or REPEATED_LIST
+ * </pre>
*/
- public org.apache.drill.exec.proto.UserBitShared.FieldMetadata getChild(int index) {
+ public org.apache.drill.exec.proto.UserBitShared.SerializedField getChild(int index) {
return child_.get(index);
}
/**
- * <code>repeated .exec.shared.FieldMetadata child = 6;</code>
+ * <code>repeated .exec.shared.SerializedField child = 3;</code>
+ *
+ * <pre>
+ * only in the cases of type == MAP or REPEAT_MAP or REPEATED_LIST
+ * </pre>
*/
- public org.apache.drill.exec.proto.UserBitShared.FieldMetadataOrBuilder getChildOrBuilder(
+ public org.apache.drill.exec.proto.UserBitShared.SerializedFieldOrBuilder getChildOrBuilder(
int index) {
return child_.get(index);
}
+ // optional int32 value_count = 4;
+ public static final int VALUE_COUNT_FIELD_NUMBER = 4;
+ private int valueCount_;
+ /**
+ * <code>optional int32 value_count = 4;</code>
+ */
+ public boolean hasValueCount() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional int32 value_count = 4;</code>
+ */
+ public int getValueCount() {
+ return valueCount_;
+ }
+
+ // optional int32 var_byte_length = 5;
+ public static final int VAR_BYTE_LENGTH_FIELD_NUMBER = 5;
+ private int varByteLength_;
+ /**
+ * <code>optional int32 var_byte_length = 5;</code>
+ */
+ public boolean hasVarByteLength() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>optional int32 var_byte_length = 5;</code>
+ */
+ public int getVarByteLength() {
+ return varByteLength_;
+ }
+
+ // optional int32 group_count = 6;
+ public static final int GROUP_COUNT_FIELD_NUMBER = 6;
+ private int groupCount_;
+ /**
+ * <code>optional int32 group_count = 6;</code>
+ *
+ * <pre>
+ * number of groups. (number of repeated records)
+ * </pre>
+ */
+ public boolean hasGroupCount() {
+ return ((bitField0_ & 0x00000010) == 0x00000010);
+ }
+ /**
+ * <code>optional int32 group_count = 6;</code>
+ *
+ * <pre>
+ * number of groups. (number of repeated records)
+ * </pre>
+ */
+ public int getGroupCount() {
+ return groupCount_;
+ }
+
+ // optional int32 buffer_length = 7;
+ public static final int BUFFER_LENGTH_FIELD_NUMBER = 7;
+ private int bufferLength_;
+ /**
+ * <code>optional int32 buffer_length = 7;</code>
+ */
+ public boolean hasBufferLength() {
+ return ((bitField0_ & 0x00000020) == 0x00000020);
+ }
+ /**
+ * <code>optional int32 buffer_length = 7;</code>
+ */
+ public int getBufferLength() {
+ return bufferLength_;
+ }
+
private void initFields() {
- def_ = org.apache.drill.exec.proto.SchemaDefProtos.FieldDef.getDefaultInstance();
+ majorType_ = org.apache.drill.common.types.TypeProtos.MajorType.getDefaultInstance();
+ namePart_ = org.apache.drill.exec.proto.UserBitShared.NamePart.getDefaultInstance();
+ child_ = java.util.Collections.emptyList();
valueCount_ = 0;
varByteLength_ = 0;
groupCount_ = 0;
bufferLength_ = 0;
- child_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -4340,22 +5289,25 @@ public final class UserBitShared {
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeMessage(1, def_);
+ output.writeMessage(1, majorType_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeInt32(2, valueCount_);
+ output.writeMessage(2, namePart_);
+ }
+ for (int i = 0; i < child_.size(); i++) {
+ output.writeMessage(3, child_.get(i));
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
- output.writeInt32(3, varByteLength_);
+ output.writeInt32(4, valueCount_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
- output.writeInt32(4, groupCount_);
+ output.writeInt32(5, varByteLength_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
- output.writeInt32(5, bufferLength_);
+ output.writeInt32(6, groupCount_);
}
- for (int i = 0; i < child_.size(); i++) {
- output.writeMessage(6, child_.get(i));
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
+ output.writeInt32(7, bufferLength_);
}
getUnknownFields().writeTo(output);
}
@@ -4368,27 +5320,31 @@ public final class UserBitShared {
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, def_);
+ .computeMessageSize(1, majorType_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
- .computeInt32Size(2, valueCount_);
+ .computeMessageSize(2, namePart_);
+ }
+ for (int i = 0; i < child_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, child_.get(i));
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
- .computeInt32Size(3, varByteLength_);
+ .computeInt32Size(4, valueCount_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream
- .computeInt32Size(4, groupCount_);
+ .computeInt32Size(5, varByteLength_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream
- .computeInt32Size(5, bufferLength_);
+ .computeInt32Size(6, groupCount_);
}
- for (int i = 0; i < child_.size(); i++) {
+ if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(6, child_.get(i));
+ .computeInt32Size(7, bufferLength_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
@@ -4402,53 +5358,53 @@ public final class UserBitShared {
return super.writeReplace();
}
- public static org.apache.drill.exec.proto.UserBitShared.FieldMetadata parseFrom(
+ public static org.apache.drill.exec.proto.UserBitShared.SerializedField parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.drill.exec.proto.UserBitShared.FieldMetadata parseFrom(
+ public static org.apache.drill.exec.proto.UserBitShared.SerializedField parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.drill.exec.proto.UserBitShared.FieldMetadata parseFrom(byte[] data)
+ public static org.apache.drill.exec.proto.UserBitShared.SerializedField parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
- public static org.apache.drill.exec.proto.UserBitShared.FieldMetadata parseFrom(
+ public static org.apache.drill.exec.proto.UserBitShared.SerializedField parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
- public static org.apache.drill.exec.proto.UserBitShared.FieldMetadata parseFrom(java.io.InputStream input)
+ public static org.apache.drill.exec.proto.UserBitShared.SerializedField parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
- public static org.apache.drill.exec.proto.UserBitShared.FieldMetadata parseFrom(
+ public static org.apache.drill.exec.proto.UserBitShared.SerializedField parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
- public static org.apache.drill.exec.proto.UserBitShared.FieldMetadata parseDelimitedFrom(java.io.InputStream input)
+ public static org.apache.drill.exec.proto.UserBitShared.SerializedField parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
- public static org.apache.drill.exec.proto.UserBitShared.FieldMetadata parseDelimitedFrom(
+ public static org.apache.drill.exec.proto.UserBitShared.SerializedField parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
- public static org.apache.drill.exec.proto.UserBitShared.FieldMetadata parseFrom(
+ public static org.apache.drill.exec.proto.UserBitShared.SerializedField parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
- public static org.apache.drill.exec.proto.UserBitShared.FieldMetadata parseFrom(
+ public static org.apache.drill.exec.proto.UserBitShared.SerializedField parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
@@ -4457,7 +5413,7 @@ public final class UserBitShared {
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.drill.exec.proto.UserBitShared.FieldMetadata prototype) {
+ public static Builder newBuilder(org.apache.drill.exec.proto.UserBitShared.SerializedField prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@@ -4469,24 +5425,24 @@ public final class UserBitShared {
return builder;
}
/**
- * Protobuf type {@code exec.shared.FieldMetadata}
+ * Protobuf type {@code exec.shared.Seri
<TRUNCATED>
[02/10] Add support for RepeatedMapVector,
MapVector and RepeatedListVector.
Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/protocol/src/main/protobuf/SchemaDef.proto
----------------------------------------------------------------------
diff --git a/protocol/src/main/protobuf/SchemaDef.proto b/protocol/src/main/protobuf/SchemaDef.proto
index 2d6575b..206e9d3 100644
--- a/protocol/src/main/protobuf/SchemaDef.proto
+++ b/protocol/src/main/protobuf/SchemaDef.proto
@@ -12,20 +12,3 @@ enum ValueMode {
RLE = 1;
DICT = 2;
}
-
-message NamePart {
- enum Type{
- NAME = 0;
- ARRAY = 1;
- }
-
- optional Type type = 1;
- optional string name = 2; // only required if this is a named type.
-}
-
-message FieldDef {
- repeated NamePart name = 1; // multipart description of entire field name
- optional common.MajorType major_type = 2; // the type associated with this field.
- //repeated FieldDef field = 3; // only in the cases of type == MAP or REPEATMAP
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/protocol/src/main/protobuf/Types.proto
----------------------------------------------------------------------
diff --git a/protocol/src/main/protobuf/Types.proto b/protocol/src/main/protobuf/Types.proto
index 3871a40..3d5ac38 100644
--- a/protocol/src/main/protobuf/Types.proto
+++ b/protocol/src/main/protobuf/Types.proto
@@ -24,7 +24,7 @@ option optimize_for = SPEED;
enum MinorType {
LATE = 0; // late binding type
MAP = 1; // an empty map column. Useful for conceptual setup. Children listed within here
- REPEATMAP = 2; // a repeated map column (means that multiple children sit below this)
+
TINYINT = 3; // single byte signed integer
SMALLINT = 4; // two byte signed integer
INT = 5; // four byte signed integer
@@ -62,6 +62,7 @@ enum MinorType {
NULL = 37; // a value of unknown type (e.g. a missing reference).
INTERVALYEAR = 38; // Interval type specifying YEAR to MONTH
INTERVALDAY = 39; // Interval type specifying DAY to SECONDS
+ LIST = 40;
}
message MajorType {
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/protocol/src/main/protobuf/UserBitShared.proto
----------------------------------------------------------------------
diff --git a/protocol/src/main/protobuf/UserBitShared.proto b/protocol/src/main/protobuf/UserBitShared.proto
index 0eaa163..a3a734b 100644
--- a/protocol/src/main/protobuf/UserBitShared.proto
+++ b/protocol/src/main/protobuf/UserBitShared.proto
@@ -4,6 +4,7 @@ option java_package = "org.apache.drill.exec.proto";
option java_outer_classname = "UserBitShared";
option optimize_for = SPEED;
+import "Types.proto";
import "Coordination.proto";
import "SchemaDef.proto";
@@ -38,16 +39,30 @@ message ParsingError{
}
message RecordBatchDef {
- repeated FieldMetadata field = 1;
- optional int32 record_count = 2;
+ optional int32 record_count = 1;
+ repeated SerializedField field = 2;
optional bool is_selection_vector_2 = 3;
}
-message FieldMetadata {
- optional FieldDef def = 1;
- optional int32 value_count = 2;
- optional int32 var_byte_length = 3;
- optional int32 group_count = 4; // number of groups. (number of repeated records)
- optional int32 buffer_length = 5;
- repeated FieldMetadata child = 6;
+message NamePart{
+
+ enum Type{
+ NAME = 0;
+ ARRAY = 1;
+ }
+
+ optional Type type = 1;
+ optional string name = 2;
+ optional NamePart child = 3;
+}
+
+message SerializedField {
+ optional common.MajorType major_type = 1; // the type associated with this field.
+ optional NamePart name_part = 2;
+ repeated SerializedField child = 3; // only in the cases of type == MAP or REPEAT_MAP or REPEATED_LIST
+
+ optional int32 value_count = 4;
+ optional int32 var_byte_length = 5;
+ optional int32 group_count = 6; // number of groups. (number of repeated records)
+ optional int32 buffer_length = 7;
}
[10/10] git commit: Add support for RepeatedMapVector,
MapVector and RepeatedListVector.
Posted by ja...@apache.org.
Add support for RepeatedMapVector, MapVector and RepeatedListVector.
Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/cdc5daed
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/cdc5daed
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/cdc5daed
Branch: refs/heads/master
Commit: cdc5daed5218ed70445d178f5fc25df09c573001
Parents: 4ffef01
Author: Jacques Nadeau <ja...@apache.org>
Authored: Sun May 11 15:12:19 2014 -0700
Committer: Jacques Nadeau <ja...@apache.org>
Committed: Sun May 11 19:46:51 2014 -0700
----------------------------------------------------------------------
.../drill/common/expression/PathSegment.java | 13 +-
.../drill/common/expression/SchemaPath.java | 68 +
.../org/apache/drill/common/types/Types.java | 72 +-
.../apache/drill/common/util/PathScanner.java | 23 +-
.../src/main/codegen/data/ValueVectorTypes.tdd | 49 +-
.../src/main/codegen/includes/vv_imports.ftl | 15 +-
.../codegen/templates/AbstractFieldReader.java | 114 +
.../codegen/templates/AbstractFieldWriter.java | 104 +
.../src/main/codegen/templates/BaseReader.java | 65 +
.../src/main/codegen/templates/BaseWriter.java | 79 +
.../main/codegen/templates/ComplexReaders.java | 172 ++
.../main/codegen/templates/ComplexWriters.java | 130 +
.../codegen/templates/FixedValueVectors.java | 123 +-
.../src/main/codegen/templates/ListWriters.java | 180 ++
.../src/main/codegen/templates/MapWriters.java | 172 ++
.../src/main/codegen/templates/NullReader.java | 113 +
.../codegen/templates/NullableValueVectors.java | 107 +-
.../templates/ObjectInspectorHelper.java | 2 +-
.../codegen/templates/ObjectInspectors.java | 4 +-
.../codegen/templates/RecordValueAccessor.java | 2 +-
.../codegen/templates/RepeatedValueVectors.java | 124 +-
.../main/codegen/templates/SqlAccessors.java | 4 +-
.../src/main/codegen/templates/TypeHelper.java | 133 +-
.../templates/VariableLengthVectors.java | 57 +-
.../cache/VectorAccessibleSerializable.java | 38 +-
.../apache/drill/exec/expr/ClassGenerator.java | 135 +-
.../drill/exec/expr/EvaluationVisitor.java | 246 +-
.../exec/expr/ExpressionTreeMaterializer.java | 40 +-
.../exec/expr/ValueVectorReadExpression.java | 45 +-
.../exec/expr/fn/impl/HashFunctions.java.orig | 252 ++
.../drill/exec/expr/holders/ComplexHolder.java | 25 +
.../exec/expr/holders/RepeatedListHolder.java | 23 +
.../exec/expr/holders/RepeatedMapHolder.java | 23 +
.../memory/OutOfMemoryRuntimeException.java | 49 +
.../drill/exec/physical/impl/ScanBatch.java | 4 +-
.../impl/TopN/PriorityQueueTemplate.java | 4 +-
.../exec/physical/impl/TopN/TopNBatch.java | 30 +-
.../exec/physical/impl/WireRecordBatch.java | 12 +-
.../impl/aggregate/HashAggTemplate.java | 96 +-
.../physical/impl/aggregate/InternalBatch.java | 14 +-
.../exec/physical/impl/join/HashJoinBatch.java | 8 +-
.../exec/physical/impl/join/JoinStatus.java | 14 +-
.../exec/physical/impl/join/JoinTemplate.java | 16 +-
.../impl/mergereceiver/MergingRecordBatch.java | 18 +-
.../OrderedPartitionProjectorTemplate.java | 4 +-
.../partitionsender/OutgoingRecordBatch.java | 4 +-
.../PartitionSenderRootExec.java | 18 +-
.../impl/project/ProjectRecordBatch.java | 25 +-
.../impl/project/ProjectorTemplate.java | 23 +-
.../impl/svremover/RemovingRecordBatch.java | 38 +-
.../IteratorValidatorBatchIterator.java | 4 +-
.../exec/physical/impl/xsort/BatchGroup.java | 4 +-
.../physical/impl/xsort/ExternalSortBatch.java | 12 +-
.../drill/exec/record/AbstractRecordBatch.java | 18 +-
.../drill/exec/record/HyperVectorWrapper.java | 56 +-
.../drill/exec/record/MaterializedField.java | 152 +-
.../apache/drill/exec/record/RecordBatch.java | 18 +-
.../drill/exec/record/RecordBatchLoader.java | 22 +-
.../drill/exec/record/SimpleVectorWrapper.java | 61 +-
.../apache/drill/exec/record/TransferPair.java | 2 +-
.../apache/drill/exec/record/TypedFieldId.java | 196 +-
.../drill/exec/record/VectorAccessible.java | 2 +-
.../drill/exec/record/VectorContainer.java | 36 +-
.../apache/drill/exec/record/VectorWrapper.java | 10 +
.../apache/drill/exec/record/WritableBatch.java | 15 +-
.../exec/store/easy/json/JSONFormatPlugin.java | 10 +-
.../exec/store/easy/json/JSONRecordReader.java | 532 ----
.../exec/store/easy/json/JSONRecordReader2.java | 116 +
.../drill/exec/store/hive/HiveRecordReader.java | 2 +-
.../drill/exec/store/mock/MockRecordReader.java | 2 +-
.../exec/store/parquet/ParquetRecordReader.java | 7 +-
.../drill/exec/vector/AllocationHelper.java | 6 +-
.../drill/exec/vector/BaseDataValueVector.java | 26 +-
.../drill/exec/vector/BaseValueVector.java | 31 +-
.../org/apache/drill/exec/vector/BitVector.java | 50 +-
.../drill/exec/vector/RepeatedMutator.java | 2 +-
.../apache/drill/exec/vector/ValueVector.java | 75 +-
.../exec/vector/accessor/GenericAccessor.java | 2 +-
.../vector/complex/AbstractContainerVector.java | 113 +
.../exec/vector/complex/AbstractMapVector.java | 22 +
.../drill/exec/vector/complex/MapVector.java | 391 +++
.../drill/exec/vector/complex/Positionable.java | 22 +
.../exec/vector/complex/RepeatedListVector.java | 407 +++
.../exec/vector/complex/RepeatedMapVector.java | 478 ++++
.../drill/exec/vector/complex/StateTool.java | 31 +
.../drill/exec/vector/complex/WriteState.java | 46 +
.../exec/vector/complex/fn/JsonReader.java | 231 ++
.../vector/complex/fn/JsonRecordSplitter.java | 27 +
.../exec/vector/complex/fn/JsonWriter.java | 258 ++
.../complex/fn/ReaderJSONRecordSplitter.java | 178 ++
.../complex/fn/UTF8JsonRecordSplitter.java | 150 +
.../vector/complex/impl/AbstractBaseReader.java | 63 +
.../vector/complex/impl/AbstractBaseWriter.java | 72 +
.../vector/complex/impl/ComplexWriterImpl.java | 164 ++
.../complex/impl/RepeatedListReaderImpl.java | 113 +
.../complex/impl/RepeatedMapReaderImpl.java | 205 ++
.../complex/impl/SingleListReaderImpl.java | 92 +
.../complex/impl/SingleMapReaderImpl.java | 154 ++
.../complex/impl/VectorContainerWriter.java | 104 +
.../exec/vector/complex/reader/FieldReader.java | 29 +
.../exec/vector/complex/writer/FieldWriter.java | 29 +
.../java/org/apache/drill/PlanTestBase.java | 7 +-
.../exec/compile/TestEvaluationVisitor.java | 91 +
.../apache/drill/exec/expr/ExpressionTest.java | 6 +-
.../drill/exec/fn/impl/TestDateFunctions.java | 74 +-
.../exec/physical/impl/SimpleRootExec.java | 2 +-
.../exec/physical/impl/TestCastFunctions.java | 2 +-
.../physical/impl/TestConvertFunctions.java | 22 +-
.../drill/exec/physical/impl/TestDecimal.java | 20 +-
.../physical/impl/TestExtractFunctions.java | 2 +-
.../impl/TestImplicitCastFunctions.java | 4 +-
.../physical/impl/TestReverseImplicitCast.java | 2 +-
.../exec/physical/impl/TopN/TestSimpleTopN.java | 2 +-
.../drill/exec/physical/impl/agg/TestAgg.java | 12 +-
.../exec/physical/impl/join/TestMergeJoin.java | 1 +
.../TestOrderedPartitionExchange.java | 12 +-
.../exec/physical/impl/writer/TestWriter.java | 6 +-
.../impl/xsort/TestSimpleExternalSort.java | 6 +-
.../record/ExpressionTreeMaterializerTest.java | 5 +-
.../drill/exec/record/vector/TestDateTypes.java | 60 +-
.../exec/record/vector/TestValueVector.java | 16 +-
.../exec/store/json/JSONRecordReaderTest.java | 395 ---
.../exec/store/json/JsonRecordReader2Test.java | 42 +
.../vector/complex/writer/TestJsonReader.java | 162 ++
.../vector/complex/writer/TestRepeated.java | 264 ++
.../src/test/resources/jsoninput/input2.json | 40 +
.../apache/drill/jdbc/DrillAccessorList.java | 10 +-
.../jdbc/test/TestAggregateFunctionsQuery.java | 1 +
.../drill/jdbc/test/TestFunctionsQuery.java | 1 +
pom.xml | 1 +
.../apache/drill/common/types/TypeProtos.java | 17 +-
.../drill/exec/proto/SchemaDefProtos.java | 1766 +-----------
.../apache/drill/exec/proto/UserBitShared.java | 2582 +++++++++++++-----
protocol/src/main/protobuf/SchemaDef.proto | 17 -
protocol/src/main/protobuf/Types.proto | 3 +-
protocol/src/main/protobuf/UserBitShared.proto | 33 +-
136 files changed, 9300 insertions(+), 4363 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/common/src/main/java/org/apache/drill/common/expression/PathSegment.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/PathSegment.java b/common/src/main/java/org/apache/drill/common/expression/PathSegment.java
index ffbe493..0ecfcd0 100644
--- a/common/src/main/java/org/apache/drill/common/expression/PathSegment.java
+++ b/common/src/main/java/org/apache/drill/common/expression/PathSegment.java
@@ -30,13 +30,22 @@ public abstract class PathSegment{
private final int index;
public ArraySegment(String numberAsText, PathSegment child){
- this.child = child;
- this.index = Integer.parseInt(numberAsText);
+ this(Integer.parseInt(numberAsText), child);
}
public ArraySegment(int index, PathSegment child){
this.child = child;
this.index = index;
+ assert index >=0;
+ }
+
+ public ArraySegment(PathSegment child){
+ this.child = child;
+ this.index = -1;
+ }
+
+ public boolean hasIndex(){
+ return index != -1;
}
public ArraySegment(int index){
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/common/src/main/java/org/apache/drill/common/expression/SchemaPath.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/expression/SchemaPath.java b/common/src/main/java/org/apache/drill/common/expression/SchemaPath.java
index fd75aeb..25ee8b4 100644
--- a/common/src/main/java/org/apache/drill/common/expression/SchemaPath.java
+++ b/common/src/main/java/org/apache/drill/common/expression/SchemaPath.java
@@ -35,12 +35,16 @@ import org.apache.drill.common.expression.parser.ExprParser.parse_return;
import org.apache.drill.common.expression.visitors.ExprVisitor;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.proto.UserBitShared.NamePart;
+import org.apache.drill.exec.proto.UserBitShared.NamePart.Type;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
+import com.google.common.collect.Lists;
public class SchemaPath extends LogicalExpressionBase {
@@ -60,6 +64,14 @@ public class SchemaPath extends LogicalExpressionBase {
return new SchemaPath(s);
}
+ public PathSegment getLastSegment(){
+ PathSegment s= rootSegment;
+ while(s.getChild() != null){
+ s = s.getChild();
+ }
+ return s;
+ }
+
/**
*
* @param simpleName
@@ -71,6 +83,57 @@ public class SchemaPath extends LogicalExpressionBase {
if(simpleName.contains(".")) throw new IllegalStateException("This is deprecated and only supports simpe paths.");
}
+
+ public NamePart getAsNamePart(){
+ return getNamePart(rootSegment);
+ }
+
+ private static NamePart getNamePart(PathSegment s){
+ if(s == null) return null;
+ NamePart.Builder b = NamePart.newBuilder();
+ if(s.getChild() != null){
+ b.setChild(getNamePart(s.getChild()));
+ }
+
+ if(s.isArray()){
+ if(s.getArraySegment().hasIndex()) throw new IllegalStateException("You cannot convert a indexed schema path to a NamePart. NameParts can only reference Vectors, not individual records or values.");
+ b.setType(Type.ARRAY);
+ }else{
+ b.setType(Type.NAME);
+ b.setName(s.getNameSegment().getPath());
+ }
+ return b.build();
+ }
+
+ private static PathSegment getPathSegment(NamePart n){
+ PathSegment child = n.hasChild() ? getPathSegment(n.getChild()) : null;
+ if(n.getType() == Type.ARRAY){
+ return new ArraySegment(child);
+ }else{
+ return new NameSegment(n.getName(), child);
+ }
+ }
+
+ public static SchemaPath create(NamePart namePart){
+ Preconditions.checkArgument(namePart.getType() == NamePart.Type.NAME);
+ return new SchemaPath((NameSegment) getPathSegment(namePart));
+ }
+
+
+ /**
+ * A simple path is a path where there are no repeated elements outside the lowest level of the path.
+ * @return Whether this path is a simple path.
+ */
+ public boolean isSimplePath(){
+ PathSegment seg = rootSegment;
+ while(seg != null){
+ if(seg.isArray() && !seg.isLastPath()) return false;
+ seg = seg.getChild();
+ }
+ return true;
+ }
+
+
public SchemaPath(SchemaPath path){
super(path.getPosition());
this.rootSegment = path.rootSegment;
@@ -96,6 +159,11 @@ public class SchemaPath extends LogicalExpressionBase {
return new SchemaPath(newRoot);
}
+ public SchemaPath getUnindexedArrayChild(){
+ NameSegment newRoot = rootSegment.cloneWithNewChild(new ArraySegment(null));
+ return new SchemaPath(newRoot);
+ }
+
public SchemaPath getChild(int index){
NameSegment newRoot = rootSegment.cloneWithNewChild(new ArraySegment(index));
return new SchemaPath(newRoot);
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/common/src/main/java/org/apache/drill/common/types/Types.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/types/Types.java b/common/src/main/java/org/apache/drill/common/types/Types.java
index 325e20a..c6ac82e 100644
--- a/common/src/main/java/org/apache/drill/common/types/Types.java
+++ b/common/src/main/java/org/apache/drill/common/types/Types.java
@@ -27,18 +27,28 @@ import static org.apache.drill.common.types.TypeProtos.DataMode.REPEATED;
public class Types {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Types.class);
-
+
public static final MajorType NULL = required(MinorType.NULL);
public static final MajorType LATE_BIND_TYPE = optional(MinorType.LATE);
public static final MajorType REQUIRED_BIT = required(MinorType.BIT);
-
+
public static enum Comparability{
UNKNOWN, NONE, EQUAL, ORDERED;
}
-
+
+ public static boolean isComplex(MajorType type){
+ switch(type.getMinorType()){
+ case LIST:
+ case MAP:
+ return true;
+ }
+
+ return false;
+ }
+
public static boolean isNumericType(MajorType type){
if(type.getMode() == REPEATED) return false;
-
+
switch(type.getMinorType()){
case BIGINT:
case DECIMAL38SPARSE:
@@ -62,10 +72,10 @@ public class Types {
return false;
}
}
-
+
public static int getSqlType(MajorType type){
if(type.getMode() == DataMode.REPEATED) return java.sql.Types.ARRAY;
-
+
switch(type.getMinorType()){
case BIGINT:
return java.sql.Types.BIGINT;
@@ -133,7 +143,7 @@ public class Types {
throw new UnsupportedOperationException();
}
}
-
+
public static boolean isUnSigned(MajorType type){
switch(type.getMinorType()){
case UINT1:
@@ -144,7 +154,7 @@ public class Types {
default:
return false;
}
-
+
}
public static boolean usesHolderForGet(MajorType type){
if(type.getMode() == REPEATED) return true;
@@ -165,13 +175,13 @@ public class Types {
case TIME:
case TIMESTAMP:
return false;
-
- default:
+
+ default:
return true;
}
-
+
}
-
+
public static boolean isFixedWidthType(MajorType type){
switch(type.getMinorType()){
case VARBINARY:
@@ -182,8 +192,8 @@ public class Types {
return true;
}
}
-
-
+
+
public static boolean isStringScalarType(MajorType type){
if(type.getMode() == REPEATED) return false;
switch(type.getMinorType()){
@@ -192,26 +202,26 @@ public class Types {
case VARCHAR:
case VAR16CHAR:
return true;
- default:
+ default:
return false;
}
}
-
+
public static boolean isBytesScalarType(MajorType type){
if(type.getMode() == REPEATED) return false;
switch(type.getMinorType()){
case FIXEDBINARY:
case VARBINARY:
return true;
- default:
+ default:
return false;
}
}
-
+
public static Comparability getComparability(MajorType type){
if(type.getMode() == REPEATED) return Comparability.NONE;
if(type.getMinorType() == MinorType.LATE) return Comparability.UNKNOWN;
-
+
switch(type.getMinorType()){
case LATE:
return Comparability.UNKNOWN;
@@ -223,21 +233,21 @@ public class Types {
default:
return Comparability.ORDERED;
}
-
+
}
-
-
+
+
public static boolean softEquals(MajorType a, MajorType b, boolean allowNullSwap){
if(a.getMinorType() != b.getMinorType()){
if(
(a.getMinorType() == MinorType.VARBINARY && b.getMinorType() == MinorType.VARCHAR) ||
- (b.getMinorType() == MinorType.VARBINARY && a.getMinorType() == MinorType.VARCHAR)
+ (b.getMinorType() == MinorType.VARBINARY && a.getMinorType() == MinorType.VARCHAR)
){
// fall through;
}else{
- return false;
+ return false;
}
-
+
}
if(allowNullSwap){
switch(a.getMode()){
@@ -252,23 +262,23 @@ public class Types {
}
return a.getMode() == b.getMode();
}
-
+
public static boolean isLateBind(MajorType type){
return type.getMinorType() == MinorType.LATE;
}
-
+
public static MajorType withMode(MinorType type, DataMode mode){
return MajorType.newBuilder().setMode(mode).setMinorType(type).build();
}
-
+
public static MajorType required(MinorType type){
return MajorType.newBuilder().setMode(DataMode.REQUIRED).setMinorType(type).build();
}
-
+
public static MajorType repeated(MinorType type){
return MajorType.newBuilder().setMode(REPEATED).setMinorType(type).build();
}
-
+
public static MajorType optional(MinorType type){
return MajorType.newBuilder().setMode(DataMode.OPTIONAL).setMinorType(type).build();
}
@@ -289,7 +299,7 @@ public class Types {
public static MajorType getMajorTypeFromName(String typeName) {
return getMajorTypeFromName(typeName, DataMode.REQUIRED);
}
-
+
public static MajorType getMajorTypeFromName(String typeName, DataMode mode) {
switch (typeName) {
case "bool":
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/common/src/main/java/org/apache/drill/common/util/PathScanner.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/util/PathScanner.java b/common/src/main/java/org/apache/drill/common/util/PathScanner.java
index 65dcf23..e7e48a4 100644
--- a/common/src/main/java/org/apache/drill/common/util/PathScanner.java
+++ b/common/src/main/java/org/apache/drill/common/util/PathScanner.java
@@ -41,13 +41,13 @@ import com.google.common.collect.Sets;
public class PathScanner {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PathScanner.class);
-
+
private static final SubTypesScanner subTypeScanner = new SubTypesScanner();
private static final TypeAnnotationsScanner annotationsScanner = new TypeAnnotationsScanner();
private static final ResourcesScanner resourcesScanner = new ResourcesScanner();
private static final Object SYNC = new Object();
static volatile Reflections REFLECTIONS = null;
-
+
public static <A extends Annotation, T> Map<A, Class<? extends T>> scanForAnnotatedImplementations(Class<A> annotationClass, Class<T> baseClass, final List<String> scanPackages){
Collection<Class<? extends T>> providerClasses = scanForImplementations(baseClass, scanPackages);
@@ -58,7 +58,7 @@ public class PathScanner {
if(annotation == null) continue;
map.put(annotation, c);
}
-
+
return map;
}
@@ -68,12 +68,12 @@ public class PathScanner {
}
return REFLECTIONS;
}
-
+
public static <T> Class<?>[] scanForImplementationsArr(Class<T> baseClass, final List<String> scanPackages){
Collection<Class<? extends T>> imps = scanForImplementations(baseClass, scanPackages);
return imps.toArray(new Class<?>[imps.size()]);
}
-
+
public static <T> Set<Class<? extends T>> scanForImplementations(Class<T> baseClass, final List<String> scanPackages){
synchronized(SYNC){
Set<Class<? extends T>> classes = getReflections().getSubTypesOf(baseClass);
@@ -85,15 +85,16 @@ public class PathScanner {
return classes;
}
}
-
+
private static Collection<URL> getMarkedPaths(){
- return forResource(CommonConstants.DRILL_JAR_MARKER_FILE, true);
+ Collection<URL> urls = forResource(CommonConstants.DRILL_JAR_MARKER_FILE, true);
+ return urls;
}
public static Collection<URL> getConfigURLs(){
return forResource(CommonConstants.DRILL_JAR_MARKER_FILE, false);
}
-
+
public static Set<URL> forResource(String name, boolean stripName, ClassLoader... classLoaders) {
final Set<URL> result = Sets.newHashSet();
@@ -105,12 +106,12 @@ public class PathScanner {
final Enumeration<URL> urls = classLoader.getResources(resourceName);
while (urls.hasMoreElements()) {
final URL url = urls.nextElement();
-
+
int index = url.toExternalForm().lastIndexOf(resourceName);
if (index != -1 && stripName) {
result.add(new URL(url.toExternalForm().substring(0, index)));
} else {
- result.add(url);
+ result.add(url);
}
}
} catch (IOException e) {
@@ -122,6 +123,6 @@ public class PathScanner {
return result;
}
-
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/data/ValueVectorTypes.tdd
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/data/ValueVectorTypes.tdd b/exec/java-exec/src/main/codegen/data/ValueVectorTypes.tdd
index c5ae64e..d3b3ac4 100644
--- a/exec/java-exec/src/main/codegen/data/ValueVectorTypes.tdd
+++ b/exec/java-exec/src/main/codegen/data/ValueVectorTypes.tdd
@@ -56,10 +56,10 @@
minor: [
{ class: "Int", valueHolder: "IntHolder" },
{ class: "UInt4", valueHolder: "UInt4Holder" },
- { class: "Float4", javaType: "float" },
- { class: "Time", javaType: "int"},
- { class: "IntervalYear", javaType: "int"}
- { class: "Decimal9", maxPrecisionDigits: 9},
+ { class: "Float4", javaType: "float" , boxedType: "Float" },
+ { class: "Time", javaType: "int", friendlyType: "DateTime" },
+ { class: "IntervalYear", javaType: "int", friendlyType: "Period" }
+ { class: "Decimal9", maxPrecisionDigits: 9, friendlyType: "BigDecimal" },
]
},
{
@@ -70,10 +70,10 @@
minor: [
{ class: "BigInt" },
{ class: "UInt8" },
- { class: "Float8", javaType: "double" , boxedType: "Double"},
- { class: "Date", javaType: "long"},
- { class: "TimeStamp", javaType: "long"}
- { class: "Decimal18", maxPrecisionDigits: 18},
+ { class: "Float8", javaType: "double" , boxedType: "Double" },
+ { class: "Date", javaType: "long", friendlyType: "DateTime" },
+ { class: "TimeStamp", javaType: "long", friendlyType: "DateTime" }
+ { class: "Decimal18", maxPrecisionDigits: 18, friendlyType: "BigDecimal" },
<#--
{ class: "Money", maxPrecisionDigits: 2, scale: 1, },
-->
@@ -82,72 +82,81 @@
{
major: "Fixed",
width: 12,
- javaType: "ByteBuf"
+ javaType: "ByteBuf",
+ boxedType: "ByteBuf",
minor: [
- { class: "TimeStampTZ", milliSecondsSize: 8 },
- { class: "IntervalDay", milliSecondsOffset: 4 }
+ { class: "TimeStampTZ", milliSecondsSize: 8, friendlyType: "DateTime" },
+ { class: "IntervalDay", milliSecondsOffset: 4, friendlyType: "Period" }
]
},
{
major: "Fixed",
width: 16,
javaType: "ByteBuf"
+ boxedType: "ByteBuf",
minor: [
- { class: "Interval", daysOffset: 4, milliSecondsOffset: 8}
+ { class: "Interval", daysOffset: 4, milliSecondsOffset: 8, friendlyType: "Period" }
]
},
{
major: "Fixed",
width: 12,
javaType: "ByteBuf",
+ boxedType: "ByteBuf",
minor: [
<#--
{ class: "TimeTZ" },
{ class: "Interval" }
-->
- { class: "Decimal28Dense", maxPrecisionDigits: 28, nDecimalDigits: 3}
+ { class: "Decimal28Dense", maxPrecisionDigits: 28, nDecimalDigits: 3, friendlyType: "BigDecimal" }
]
},
{
major: "Fixed",
width: 16,
javaType: "ByteBuf",
+ boxedType: "ByteBuf",
+
minor: [
- { class: "Decimal38Dense", maxPrecisionDigits: 38, nDecimalDigits: 4}
+ { class: "Decimal38Dense", maxPrecisionDigits: 38, nDecimalDigits: 4, friendlyType: "BigDecimal" }
]
},
{
major: "Fixed",
width: 24,
javaType: "ByteBuf",
+ boxedType: "ByteBuf",
minor: [
- { class: "Decimal38Sparse", maxPrecisionDigits: 38, nDecimalDigits: 6}
+ { class: "Decimal38Sparse", maxPrecisionDigits: 38, nDecimalDigits: 6, friendlyType: "BigDecimal" }
]
},
{
major: "Fixed",
width: 20,
javaType: "ByteBuf",
+ boxedType: "ByteBuf",
minor: [
- { class: "Decimal28Sparse", maxPrecisionDigits: 28, nDecimalDigits: 5}
+ { class: "Decimal28Sparse", maxPrecisionDigits: 28, nDecimalDigits: 5, friendlyType: "BigDecimal" }
]
},
{
major: "VarLen",
width: 4,
javaType: "int",
+ boxedType: "ByteBuf",
minor: [
- { class: "VarBinary" },
- { class: "VarChar" }
- { class: "Var16Char" }
+ { class: "VarBinary" , friendlyType: "byte[]" },
+ { class: "VarChar" , friendlyType: "Text" }
+ { class: "Var16Char" , friendlyType: "String" }
]
},
{
major: "Bit",
width: 1,
javaType: "int",
+ boxedType: "ByteBuf",
minor: [
- { class: "Bit" }
+ { class: "Bit" , friendlyType: "Boolean" }
]
}
]
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/includes/vv_imports.ftl
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/includes/vv_imports.ftl b/exec/java-exec/src/main/codegen/includes/vv_imports.ftl
index 49c9ca2..71404d3 100644
--- a/exec/java-exec/src/main/codegen/includes/vv_imports.ftl
+++ b/exec/java-exec/src/main/codegen/includes/vv_imports.ftl
@@ -25,13 +25,21 @@ import org.apache.commons.lang3.ArrayUtils;
import org.apache.drill.exec.memory.AccountingByteBuf;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.proto.SchemaDefProtos;
-import org.apache.drill.exec.proto.UserBitShared.FieldMetadata;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.record.*;
import org.apache.drill.exec.vector.*;
import org.apache.drill.exec.expr.holders.*;
import org.apache.drill.common.expression.FieldReference;
import org.apache.drill.common.types.TypeProtos.*;
import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.vector.complex.*;
+import org.apache.drill.exec.vector.complex.reader.*;
+import org.apache.drill.exec.vector.complex.impl.*;
+import org.apache.drill.exec.vector.complex.writer.*;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter;
+
+import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
import com.sun.codemodel.JType;
import com.sun.codemodel.JCodeModel;
@@ -50,6 +58,11 @@ import java.sql.Timestamp;
import java.math.BigDecimal;
import java.math.BigInteger;
+import org.joda.time.DateTime;
+import org.joda.time.Period;
+
+import org.apache.hadoop.io.Text;
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/AbstractFieldReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/AbstractFieldReader.java b/exec/java-exec/src/main/codegen/templates/AbstractFieldReader.java
new file mode 100644
index 0000000..5420f99
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/AbstractFieldReader.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+<@pp.dropOutputFile />
+<@pp.changeOutputFile name="/org/apache/drill/exec/vector/complex/impl/AbstractFieldReader.java" />
+
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.impl;
+
+<#include "/@includes/vv_imports.ftl" />
+
+@SuppressWarnings("unused")
+abstract class AbstractFieldReader extends AbstractBaseReader implements FieldReader{
+
+ AbstractFieldReader(){
+ super();
+ }
+
+ <#list ["Object", "BigDecimal", "Integer", "Long", "Boolean",
+ "Character", "DateTime", "Period", "Double", "Float",
+ "Text", "String", "Byte", "Short", "byte[]"] as friendlyType>
+ <#assign safeType=friendlyType />
+ <#if safeType=="byte[]"><#assign safeType="ByteArray" /></#if>
+
+ public ${friendlyType} read${safeType}(int arrayIndex){
+ fail("read${safeType}(int arrayIndex)");
+ return null;
+ }
+
+ public ${friendlyType} read${safeType}(){
+ fail("read${safeType}()");
+ return null;
+ }
+
+ </#list>
+
+ public void copyAsValue(MapWriter writer){
+ fail("CopyAsValue MapWriter");
+ }
+ public void copyAsField(String name, MapWriter writer){
+ fail("CopyAsField MapWriter");
+ }
+ public void copyAsValue(ListWriter writer){
+ fail("CopyAsValueList");
+ }
+ public void copyAsField(String name, ListWriter writer){
+ fail("CopyAsFieldList");
+ }
+
+ <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first />
+ <#assign boxedType = (minor.boxedType!type.boxedType) />
+
+ public void read(Nullable${name}Holder holder){
+ fail("${name}");
+ }
+
+ public void read(int arrayIndex, ${name}Holder holder){
+ fail("Repeated${name}");
+ }
+
+ public void read(int arrayIndex, Nullable${name}Holder holder){
+ fail("Repeated${name}");
+ }
+
+ public void copyAsValue(${name}Writer writer){
+ fail("CopyAsValue${name}");
+ }
+ public void copyAsField(String name, ${name}Writer writer){
+ fail("CopyAsField${name}");
+ }
+ </#list></#list>
+
+ public FieldReader reader(String name){
+ fail("reader(String name)");
+ return null;
+ }
+
+ public FieldReader reader(){
+ fail("reader()");
+ return null;
+
+ }
+
+ public int size(){
+ fail("size()");
+ return -1;
+ }
+
+ private void fail(String name){
+ throw new IllegalArgumentException(String.format("You tried to read a [%s] type when you are using a field reader of type [%s].", name, this.getClass().getSimpleName()));
+ }
+
+
+}
+
+
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/AbstractFieldWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/AbstractFieldWriter.java b/exec/java-exec/src/main/codegen/templates/AbstractFieldWriter.java
new file mode 100644
index 0000000..2f90a1a
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/AbstractFieldWriter.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+<@pp.dropOutputFile />
+<@pp.changeOutputFile name="/org/apache/drill/exec/vector/complex/impl/AbstractFieldWriter.java" />
+
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.impl;
+
+<#include "/@includes/vv_imports.ftl" />
+
+@SuppressWarnings("unused")
+abstract class AbstractFieldWriter extends AbstractBaseWriter implements FieldWriter{
+
+ AbstractFieldWriter(FieldWriter parent){
+ super(parent);
+ }
+
+ public void start(){
+ throw new IllegalStateException(String.format("You tried to start when you are using a ValueWriter of type %s.", this.getClass().getSimpleName()));
+ }
+
+ public void end(){
+ throw new IllegalStateException(String.format("You tried to end when you are using a ValueWriter of type %s.", this.getClass().getSimpleName()));
+ }
+
+ <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first />
+ public void write(${name}Holder holder){
+ fail("${name}");
+ }
+
+ </#list></#list>
+
+ public MapWriter map(){
+ fail("Map");
+ return null;
+ }
+
+ public ListWriter list(){
+ fail("List");
+ return null;
+ }
+
+ public MapWriter map(String name){
+ fail("Map");
+ return null;
+ }
+
+ public ListWriter list(String name){
+ fail("List");
+ return null;
+ }
+
+ <#list vv.types as type><#list type.minor as minor>
+ <#assign lowerName = minor.class?uncap_first />
+ <#if lowerName == "int" ><#assign lowerName = "integer" /></#if>
+ <#assign upperName = minor.class?upper_case />
+ <#assign capName = minor.class?cap_first />
+ public ${capName}Writer ${lowerName}(String name){
+ fail("${capName}");
+ return null;
+ }
+ public ${capName}Writer ${lowerName}(){
+ fail("${capName}");
+ return null;
+ }
+
+
+ </#list></#list>
+
+
+ public void copyReader(FieldReader reader){
+ fail("Copy FieldReader");
+ }
+ public void copyReaderToField(String name, FieldReader reader){
+ fail("Copy FieldReader to STring");
+ }
+
+ private void fail(String name){
+ throw new IllegalArgumentException(String.format("You tried to write a %s type when you are using a ValueWriter of type %s.", name, this.getClass().getSimpleName()));
+ }
+
+
+}
+
+
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/BaseReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/BaseReader.java b/exec/java-exec/src/main/codegen/templates/BaseReader.java
new file mode 100644
index 0000000..42f4826
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/BaseReader.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+<@pp.dropOutputFile />
+<@pp.changeOutputFile name="/org/apache/drill/exec/vector/complex/reader/BaseReader.java" />
+
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.reader;
+
+<#include "/@includes/vv_imports.ftl" />
+
+
+
+@SuppressWarnings("unused")
+public interface BaseReader extends Positionable{
+ public MajorType getType();
+
+ public interface MapReader extends BaseReader, Iterable<String>{
+ FieldReader reader(String name);
+ }
+
+ public interface RepeatedMapReader extends MapReader{
+ boolean next();
+ int size();
+ }
+
+ public interface ListReader extends BaseReader{
+ FieldReader reader();
+ }
+
+ public interface RepeatedListReader extends ListReader{
+ boolean next();
+ int size();
+ }
+
+ public interface ScalarReader extends
+ <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> ${name}Reader, </#list></#list>
+ <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> Repeated${name}Reader, </#list></#list>
+ BaseReader {}
+
+ interface ComplexReader{
+ MapReader rootAsMap();
+ ListReader rootAsList();
+ boolean rootIsMap();
+ boolean ok();
+ }
+}
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/BaseWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/BaseWriter.java b/exec/java-exec/src/main/codegen/templates/BaseWriter.java
new file mode 100644
index 0000000..0c241b2
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/BaseWriter.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+<@pp.dropOutputFile />
+<@pp.changeOutputFile name="/org/apache/drill/exec/vector/complex/writer/BaseWriter.java" />
+
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.writer;
+
+<#include "/@includes/vv_imports.ftl" />
+
+@SuppressWarnings("unused")
+public interface BaseWriter extends Positionable{
+ FieldWriter getParent();
+ boolean ok();
+ WriteState getState();
+
+ public interface MapWriter extends BaseWriter{
+ <#list vv.types as type><#list type.minor as minor>
+ <#assign lowerName = minor.class?uncap_first />
+ <#if lowerName == "int" ><#assign lowerName = "integer" /></#if>
+ <#assign upperName = minor.class?upper_case />
+ <#assign capName = minor.class?cap_first />
+ ${capName}Writer ${lowerName}(String name);
+ </#list></#list>
+
+ void copyReaderToField(String name, FieldReader reader);
+ MapWriter map(String name);
+ ListWriter list(String name);
+ void start();
+ void end();
+ }
+
+ public interface ListWriter extends BaseWriter{
+ void start();
+ void end();
+ MapWriter map();
+ ListWriter list();
+ void copyReader(FieldReader reader);
+
+ <#list vv.types as type><#list type.minor as minor>
+ <#assign lowerName = minor.class?uncap_first />
+ <#if lowerName == "int" ><#assign lowerName = "integer" /></#if>
+ <#assign upperName = minor.class?upper_case />
+ <#assign capName = minor.class?cap_first />
+ ${capName}Writer ${lowerName}();
+ </#list></#list>
+ }
+
+ public interface ScalarWriter extends
+ <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first /> ${name}Writer, </#list></#list> BaseWriter {}
+
+ public interface ComplexWriter{
+ void allocate();
+ void clear();
+ void copyReader(FieldReader reader);
+ MapWriter rootAsMap();
+ ListWriter rootAsList();
+ boolean ok();
+ }
+}
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/ComplexReaders.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/ComplexReaders.java b/exec/java-exec/src/main/codegen/templates/ComplexReaders.java
new file mode 100644
index 0000000..a6f2f04
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/ComplexReaders.java
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.lang.Override;
+import java.util.List;
+
+import org.apache.drill.exec.record.TransferPair;
+import org.apache.drill.exec.vector.complex.IndexHolder;
+import org.apache.drill.exec.vector.complex.writer.IntervalWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+
+<@pp.dropOutputFile />
+<#list vv.types as type>
+<#list type.minor as minor>
+<#list ["", "Repeated"] as mode>
+<#assign lowerName = minor.class?uncap_first />
+<#if lowerName == "int" ><#assign lowerName = "integer" /></#if>
+<#assign name = mode + minor.class?cap_first />
+<#assign javaType = (minor.javaType!type.javaType) />
+<#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) />
+<#assign safeType=friendlyType />
+<#if safeType=="byte[]"><#assign safeType="ByteArray" /></#if>
+
+<#assign hasFriendly = minor.friendlyType!"no" == "no" />
+
+<#list ["", "Nullable"] as nullMode>
+<#if (mode == "Repeated" && nullMode == "") || mode == "" >
+<@pp.changeOutputFile name="/org/apache/drill/exec/vector/complex/impl/${nullMode}${name}ReaderImpl.java" />
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.impl;
+
+<#include "/@includes/vv_imports.ftl" />
+
+@SuppressWarnings("unused")
+public class ${nullMode}${name}ReaderImpl extends AbstractFieldReader {
+
+ private final ${nullMode}${name}Vector vector;
+
+ public ${nullMode}${name}ReaderImpl(${nullMode}${name}Vector vector){
+ super();
+ this.vector = vector;
+ }
+
+ public MajorType getType(){
+ return vector.getField().getType();
+ }
+
+ public boolean isSet(){
+ <#if nullMode == "Nullable">
+ return !vector.getAccessor().isNull(idx());
+ <#else>
+ return true;
+ </#if>
+ }
+
+
+
+
+ <#if mode == "Repeated">
+
+ public void copyAsValue(${minor.class?cap_first}Writer writer){
+ Repeated${minor.class?cap_first}WriterImpl impl = (Repeated${minor.class?cap_first}WriterImpl) writer;
+ impl.inform(impl.vector.copyFromSafe(idx(), impl.idx(), vector));
+ }
+
+ public void copyAsField(String name, MapWriter writer){
+ Repeated${minor.class?cap_first}WriterImpl impl = (Repeated${minor.class?cap_first}WriterImpl) writer.list(name).${lowerName}();
+ impl.inform(impl.vector.copyFromSafe(idx(), impl.idx(), vector));
+ }
+
+ public int size(){
+ return vector.getAccessor().getCount(idx());
+ }
+
+ public void read(int arrayIndex, ${minor.class?cap_first}Holder h){
+ vector.getAccessor().get(idx(), arrayIndex, h);
+ }
+ public void read(int arrayIndex, Nullable${minor.class?cap_first}Holder h){
+ vector.getAccessor().get(idx(), arrayIndex, h);
+ }
+
+ public ${friendlyType} read${safeType}(int arrayIndex){
+ return vector.getAccessor().getSingleObject(idx(), arrayIndex);
+ }
+
+
+ public List<Object> readObject(){
+ return (List<Object>) (Object) vector.getAccessor().getObject(idx());
+ }
+
+ <#else>
+
+ public void copyAsValue(${minor.class?cap_first}Writer writer){
+ ${nullMode}${minor.class?cap_first}WriterImpl impl = (${nullMode}${minor.class?cap_first}WriterImpl) writer;
+ impl.inform(impl.vector.copyFromSafe(idx(), impl.idx(), vector));
+ }
+
+ public void copyAsField(String name, MapWriter writer){
+ ${nullMode}${minor.class?cap_first}WriterImpl impl = (${nullMode}${minor.class?cap_first}WriterImpl) writer.${lowerName}(name);
+ impl.inform(impl.vector.copyFromSafe(idx(), impl.idx(), vector));
+ }
+
+ public void read(Nullable${minor.class?cap_first}Holder h){
+ vector.getAccessor().get(idx(), h);
+ }
+
+ public ${friendlyType} read${safeType}(){
+ return vector.getAccessor().getObject(idx());
+ }
+
+ public void copyValue(FieldWriter w){
+
+ }
+
+ public Object readObject(){
+ return vector.getAccessor().getObject(idx());
+ }
+
+
+ </#if>
+}
+</#if>
+</#list>
+<@pp.changeOutputFile name="/org/apache/drill/exec/vector/complex/reader/${name}Reader.java" />
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.reader;
+
+<#include "/@includes/vv_imports.ftl" />
+@SuppressWarnings("unused")
+public interface ${name}Reader extends BaseReader{
+
+ <#if mode == "Repeated">
+ public int size();
+ public void read(int arrayIndex, ${minor.class?cap_first}Holder h);
+ public void read(int arrayIndex, Nullable${minor.class?cap_first}Holder h);
+ public Object readObject(int arrayIndex);
+ public ${friendlyType} read${safeType}(int arrayIndex);
+ <#else>
+ public void read(Nullable${minor.class?cap_first}Holder h);
+ public Object readObject();
+ public ${friendlyType} read${safeType}();
+ </#if>
+ public boolean isSet();
+ public void copyAsValue(${minor.class}Writer writer);
+ public void copyAsField(String name, ${minor.class}Writer writer);
+
+}
+
+
+
+</#list>
+</#list>
+</#list>
+
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/ComplexWriters.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/ComplexWriters.java b/exec/java-exec/src/main/codegen/templates/ComplexWriters.java
new file mode 100644
index 0000000..72ff135
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/ComplexWriters.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.lang.Override;
+import java.util.Vector;
+
+import org.apache.drill.exec.vector.AllocationHelper;
+import org.apache.drill.exec.vector.ValueVector.Mutator;
+import org.apache.drill.exec.vector.complex.IndexHolder;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+
+<@pp.dropOutputFile />
+<#list vv.types as type>
+<#list type.minor as minor>
+<#list ["", "Nullable", "Repeated"] as mode>
+<#assign name = mode + minor.class?cap_first />
+<#assign eName = name />
+<#assign javaType = (minor.javaType!type.javaType) />
+
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/vector/complex/impl/${eName}WriterImpl.java" />
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.impl;
+
+<#include "/@includes/vv_imports.ftl" />
+
+@SuppressWarnings("unused")
+public class ${eName}WriterImpl extends AbstractFieldWriter {
+
+ private final ${name}Vector.Mutator mutator;
+ final ${name}Vector vector;
+
+ public ${eName}WriterImpl(${name}Vector vector, AbstractFieldWriter parent){
+ super(parent);
+ this.mutator = vector.getMutator();
+ this.vector = vector;
+ }
+
+ public void allocate(){
+ inform(vector.allocateNewSafe());
+ }
+
+ public void clear(){
+ vector.clear();
+ }
+
+ protected int idx(){
+ return super.idx();
+ }
+
+ protected void inform(boolean ok){
+ super.inform(ok);
+ }
+
+ <#if mode == "Repeated">
+
+ public void write(${minor.class?cap_first}Holder h){
+ if(ok()){
+ // update to inform(addSafe) once available for all repeated vector types for holders.
+ inform(mutator.addSafe(idx(), h));
+ }
+ }
+
+ public void write(Nullable${minor.class?cap_first}Holder h){
+ if(ok()){
+ // update to inform(addSafe) once available for all repeated vector types for holders.
+ inform(mutator.addSafe(idx(), h));
+ }
+ }
+
+ public void setPosition(int idx){
+ super.setPosition(idx);
+ inform(mutator.startNewGroup(idx));
+ }
+
+
+ <#else>
+
+ public void write(${minor.class}Holder h){
+ if(ok()){
+ // update to inform(setSafe) once available for all vector types for holders.
+ inform(mutator.setSafe(idx(), h));
+ }
+ }
+
+ public void write(Nullable${minor.class}Holder h){
+ if(ok()){
+ // update to inform(setSafe) once available for all vector types for holders.
+ inform(mutator.setSafe(idx(), h));
+ }
+ }
+
+ </#if>
+
+}
+
+<@pp.changeOutputFile name="/org/apache/drill/exec/vector/complex/writer/${eName}Writer.java" />
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.writer;
+
+<#include "/@includes/vv_imports.ftl" />
+@SuppressWarnings("unused")
+public interface ${eName}Writer extends BaseWriter{
+ public void write(${minor.class}Holder h);
+}
+
+
+
+</#list>
+</#list>
+</#list>
+
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/FixedValueVectors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/FixedValueVectors.java b/exec/java-exec/src/main/codegen/templates/FixedValueVectors.java
index f9916c9..8c6c037 100644
--- a/exec/java-exec/src/main/codegen/templates/FixedValueVectors.java
+++ b/exec/java-exec/src/main/codegen/templates/FixedValueVectors.java
@@ -23,9 +23,12 @@ import java.sql.Timestamp;
import java.math.BigDecimal;
import java.math.BigInteger;
+import org.joda.time.Period;
+
<@pp.dropOutputFile />
<#list vv.types as type>
<#list type.minor as minor>
+<#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) />
<#if type.major == "Fixed">
<@pp.changeOutputFile name="/org/apache/drill/exec/vector/${minor.class}Vector.java" />
@@ -71,6 +74,12 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
public void allocateNew() {
+ if(!allocateNewSafe()){
+ throw new OutOfMemoryRuntimeException("Failure while allocating buffer.");
+ }
+ }
+
+ public boolean allocateNewSafe() {
clear();
if (allocationMonitor > 5) {
allocationValueCount = Math.max(2, (int) (allocationValueCount * 0.9));
@@ -80,7 +89,9 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
allocationMonitor = 0;
}
this.data = allocator.buffer(allocationValueCount * ${type.width});
+ if(data == null) return false;
this.data.readerIndex(0);
+ return true;
}
/**
@@ -94,9 +105,8 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
}
@Override
- public FieldMetadata getMetadata() {
- return FieldMetadata.newBuilder()
- .setDef(getField().getDef())
+ public SerializedField getMetadata() {
+ return getMetadataBuilder()
.setValueCount(valueCount)
.setBufferLength(valueCount * ${type.width})
.build();
@@ -113,8 +123,8 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
}
@Override
- public void load(FieldMetadata metadata, ByteBuf buffer) {
- assert this.field.getDef().equals(metadata.getDef());
+ public void load(SerializedField metadata, ByteBuf buffer) {
+ assert this.field.matches(metadata);
int loaded = load(metadata.getValueCount(), buffer);
assert metadata.getBufferLength() == loaded;
}
@@ -169,8 +179,8 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
}
@Override
- public void copyValue(int fromIndex, int toIndex) {
- to.copyFrom(fromIndex, toIndex, ${minor.class}Vector.this);
+ public boolean copyValueSafe(int fromIndex, int toIndex) {
+ return to.copyFromSafe(fromIndex, toIndex, ${minor.class}Vector.this);
}
}
@@ -192,6 +202,12 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
public final class Accessor extends BaseValueVector.BaseAccessor{
+ final FieldReader reader = new ${minor.class}ReaderImpl(${minor.class}Vector.this);
+
+ public FieldReader getReader(){
+ return reader;
+ }
+
public int getValueCount() {
return valueCount;
}
@@ -216,15 +232,17 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
holder.index = data.getInt((index * ${type.width})+ ${minor.milliSecondsSize});
}
- void get(int index, Nullable${minor.class}Holder holder){
+ public void get(int index, Nullable${minor.class}Holder holder){
+ holder.isSet = 1;
holder.value = data.getLong(index * ${type.width});
holder.index = data.getInt((index * ${type.width})+ ${minor.milliSecondsSize});
}
@Override
- public Object getObject(int index) {
-
- return new Timestamp(data.getLong(index * ${type.width}));
+ public ${friendlyType} getObject(int index) {
+ long l = data.getLong(index * ${type.width});
+ DateTime t = new DateTime(l);
+ return t;
}
<#elseif (minor.class == "Interval")>
@@ -236,15 +254,25 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
holder.milliSeconds = data.getInt(offsetIndex + ${minor.milliSecondsOffset});
}
- void get(int index, Nullable${minor.class}Holder holder){
+ public void get(int index, Nullable${minor.class}Holder holder){
int offsetIndex = index * ${type.width};
+ holder.isSet = 1;
holder.months = data.getInt(offsetIndex);
holder.days = data.getInt(offsetIndex + ${minor.daysOffset});
holder.milliSeconds = data.getInt(offsetIndex + ${minor.milliSecondsOffset});
}
@Override
- public Object getObject(int index) {
+ public ${friendlyType} getObject(int index) {
+ int offsetIndex = index * ${type.width};
+ int months = data.getInt(offsetIndex);
+ int days = data.getInt(offsetIndex + ${minor.daysOffset});
+ int millis = data.getInt(offsetIndex + ${minor.milliSecondsOffset});
+ Period p = new Period();
+ return p.plusMonths(months).plusDays(days).plusMillis(millis);
+ }
+
+ public StringBuilder getAsString(int index) {
int offsetIndex = index * ${type.width};
@@ -287,14 +315,24 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
holder.milliSeconds = data.getInt(offsetIndex + ${minor.milliSecondsOffset});
}
- void get(int index, Nullable${minor.class}Holder holder){
+ public void get(int index, Nullable${minor.class}Holder holder){
int offsetIndex = index * ${type.width};
+ holder.isSet = 1;
holder.days = data.getInt(offsetIndex);
holder.milliSeconds = data.getInt(offsetIndex + ${minor.milliSecondsOffset});
}
@Override
- public Object getObject(int index) {
+ public ${friendlyType} getObject(int index) {
+ int offsetIndex = index * ${type.width};
+ int millis = data.getInt(offsetIndex + ${minor.milliSecondsOffset});
+ int days = data.getInt(offsetIndex);
+ Period p = new Period();
+ return p.plusDays(days).plusMillis(millis);
+ }
+
+
+ public StringBuilder getAsStringBuilder(int index) {
int offsetIndex = index * ${type.width};
int millis = data.getInt(offsetIndex + ${minor.milliSecondsOffset});
@@ -349,8 +387,8 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
}
- void get(int index, Nullable${minor.class}Holder holder) {
-
+ public void get(int index, Nullable${minor.class}Holder holder) {
+ holder.isSet = 1;
holder.start = index * ${type.width};
holder.buffer = data;
@@ -366,7 +404,7 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
}
@Override
- public Object getObject(int index) {
+ public ${friendlyType} getObject(int index) {
<#if (minor.class == "Decimal28Sparse") || (minor.class == "Decimal38Sparse")>
// Get the BigDecimal object
return org.apache.drill.common.util.DecimalUtility.getBigDecimalFromSparse(data, index * ${type.width}, ${minor.nDecimalDigits}, getField().getScale());
@@ -381,13 +419,14 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
holder.start = index * ${type.width};
}
- void get(int index, Nullable${minor.class}Holder holder){
+ public void get(int index, Nullable${minor.class}Holder holder){
+ holder.isSet = 1;
holder.buffer = data;
holder.start = index * ${type.width};
}
@Override
- public Object getObject(int index) {
+ public ${friendlyType} getObject(int index) {
ByteBuf dst = io.netty.buffer.Unpooled.wrappedBuffer(new byte[${type.width}]);
//dst = new io.netty.buffer.SwappedByteBuf(dst);
@@ -407,58 +446,54 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
}
<#if minor.class == "Date">
- public Object getObject(int index) {
+ public ${friendlyType} getObject(int index) {
org.joda.time.DateTime date = new org.joda.time.DateTime(get(index), org.joda.time.DateTimeZone.UTC);
date = date.withZoneRetainFields(org.joda.time.DateTimeZone.getDefault());
-
- return new Date(date.getMillis());
-
+ return date;
}
+
<#elseif minor.class == "TimeStamp">
- public Object getObject(int index) {
+ public ${friendlyType} getObject(int index) {
org.joda.time.DateTime date = new org.joda.time.DateTime(get(index), org.joda.time.DateTimeZone.UTC);
date = date.withZoneRetainFields(org.joda.time.DateTimeZone.getDefault());
-
- return new Timestamp(date.getMillis());
+ return date;
}
+
<#elseif minor.class == "IntervalYear">
- public Object getObject(int index) {
+ public ${friendlyType} getObject(int index) {
int value = get(index);
int years = (value / org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths);
int months = (value % org.apache.drill.exec.expr.fn.impl.DateUtility.yearsToMonths);
-
- String yearString = (Math.abs(years) == 1) ? " year " : " years ";
- String monthString = (Math.abs(months) == 1) ? " month " : " months ";
-
-
- return(new StringBuilder().append(years).append(yearString).append(months).append(monthString));
-
-
+ Period p = new Period();
+ return p.plusYears(years).plusMonths(months);
}
+
<#elseif minor.class == "Time">
@Override
- public Object getObject(int index) {
+ public DateTime getObject(int index) {
org.joda.time.DateTime time = new org.joda.time.DateTime(get(index), org.joda.time.DateTimeZone.UTC);
time = time.withZoneRetainFields(org.joda.time.DateTimeZone.getDefault());
-
- return new Time(time.getMillis());
+ return time;
}
<#elseif minor.class == "Decimal9" || minor.class == "Decimal18">
@Override
- public Object getObject(int index) {
+ public ${friendlyType} getObject(int index) {
BigInteger value = BigInteger.valueOf(((${type.boxedType})get(index)).${type.javaType}Value());
return new BigDecimal(value, getField().getScale());
}
<#else>
- public Object getObject(int index) {
+ public ${friendlyType} getObject(int index) {
+ return get(index);
+ }
+ public ${minor.javaType!type.javaType} getPrimitiveObject(int index) {
return get(index);
}
</#if>
@@ -471,8 +506,8 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
holder.value = data.get${(minor.javaType!type.javaType)?cap_first}(index * ${type.width});
}
- void get(int index, Nullable${minor.class}Holder holder){
-
+ public void get(int index, Nullable${minor.class}Holder holder){
+ holder.isSet = 1;
holder.value = data.get${(minor.javaType!type.javaType)?cap_first}(index * ${type.width});
}
@@ -641,7 +676,7 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements F
return true;
}
- protected void set(int index, Nullable${minor.class}Holder holder){
+ public void set(int index, Nullable${minor.class}Holder holder){
data.setBytes(index * ${type.width}, holder.buffer, holder.start, ${type.width});
}
</#if>
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/ListWriters.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/ListWriters.java b/exec/java-exec/src/main/codegen/templates/ListWriters.java
new file mode 100644
index 0000000..64d92e2
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/ListWriters.java
@@ -0,0 +1,180 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+<@pp.dropOutputFile />
+
+<#list ["Single", "Repeated"] as mode>
+<@pp.changeOutputFile name="/org/apache/drill/exec/vector/complex/impl/${mode}ListWriter.java" />
+
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.impl;
+<#if mode == "Single">
+ <#assign containerClass = "AbstractContainerVector" />
+ <#assign index = "idx()">
+<#else>
+ <#assign containerClass = "RepeatedListVector" />
+ <#assign index = "currentChildIndex">
+</#if>
+
+
+<#include "/@includes/vv_imports.ftl" />
+
+
+@SuppressWarnings("unused")
+public class ${mode}ListWriter extends AbstractFieldWriter{
+
+ static enum Mode { INIT, IN_MAP, IN_LIST <#list vv.types as type><#list type.minor as minor>, IN_${minor.class?upper_case}</#list></#list> }
+
+ private final String name;
+ protected final ${containerClass} container;
+ private Mode mode = Mode.INIT;
+ private FieldWriter writer;
+ protected ValueVector innerVector;
+
+ <#if mode == "Repeated">private int currentChildIndex = 0;</#if>
+ ${mode}ListWriter(String name, ${containerClass} container, FieldWriter parent){
+ super(parent);
+ this.name = name;
+ this.container = container;
+ }
+
+
+ public void allocate(){
+ if(writer != null) writer.allocate();
+ <#if mode == "Repeated">
+ inform(container.allocateNewSafe());
+ </#if>
+ }
+
+ public void clear(){
+ writer.clear();
+ }
+
+ public void setValueCount(int count){
+ if(innerVector != null) innerVector.getMutator().setValueCount(count);
+ }
+
+ public MapWriter map(){
+ switch(mode){
+ case INIT:
+ int vectorCount = container.size();
+ RepeatedMapVector vector = container.addOrGet(name, RepeatedMapVector.TYPE, RepeatedMapVector.class);
+ innerVector = vector;
+ writer = new RepeatedMapWriter(vector, this);
+ if(vectorCount != container.size()) writer.allocate();
+ writer.setPosition(${index});
+ mode = Mode.IN_MAP;
+ return writer;
+ case IN_MAP:
+ return writer;
+ }
+
+ throw new IllegalStateException(String.format("Needed to be in state INIT or IN_MAP but in mode %s", mode.name()));
+
+ }
+
+ public ListWriter list(){
+ switch(mode){
+ case INIT:
+ int vectorCount = container.size();
+ RepeatedListVector vector = container.addOrGet(name, RepeatedListVector.TYPE, RepeatedListVector.class);
+ innerVector = vector;
+ writer = new RepeatedListWriter(null, vector, this);
+ if(vectorCount != container.size()) writer.allocate();
+ writer.setPosition(${index});
+ mode = Mode.IN_LIST;
+ return writer;
+ case IN_LIST:
+ return writer;
+ }
+
+ throw new IllegalStateException(String.format("Needed to be in state INIT or IN_LIST but in mode %s", mode.name()));
+
+ }
+
+ <#list vv.types as type><#list type.minor as minor>
+ <#assign lowerName = minor.class?uncap_first />
+ <#assign upperName = minor.class?upper_case />
+ <#assign capName = minor.class?cap_first />
+ <#if lowerName == "int" ><#assign lowerName = "integer" /></#if>
+
+ private static final MajorType ${upperName}_TYPE = Types.repeated(MinorType.${upperName});
+
+ public ${capName}Writer ${lowerName}(){
+ switch(mode){
+ case INIT:
+ int vectorCount = container.size();
+ Repeated${capName}Vector vector = container.addOrGet(name, ${upperName}_TYPE, Repeated${capName}Vector.class);
+ writer = new Repeated${capName}WriterImpl(vector, this);
+ if(vectorCount != container.size()) writer.allocate();
+ writer.setPosition(${index});
+ mode = Mode.IN_${upperName};
+ return writer;
+ case IN_${upperName}:
+ return writer;
+ }
+
+ throw new IllegalStateException(String.format("Needed to be in state INIT or IN_${upperName} but in mode %s", mode.name()));
+ }
+ </#list></#list>
+
+ <#if mode == "Repeated">
+ public void start(){
+ if(ok()){
+ // update the repeated vector to state that there is current+1 objects.
+ RepeatedListHolder h = new RepeatedListHolder();
+ container.getAccessor().get(idx(), h);
+ if(h.start >= h.end){
+ container.getMutator().startNewGroup(idx());
+ }
+ currentChildIndex = container.getMutator().add(idx());
+ if(currentChildIndex == -1){
+ inform(false);
+ }else{
+ if(writer != null) writer.setPosition(currentChildIndex);
+ }
+ }
+ }
+
+
+
+ public void end(){
+ // noop, we initialize state at start rather than end.
+ }
+ <#else>
+
+ public void setPosition(int index){
+ super.setPosition(index);
+ if(writer != null) writer.setPosition(index);
+ }
+
+ public void start(){
+ // noop
+ }
+
+ public void end(){
+ // noop
+ }
+ </#if>
+
+}
+</#list>
+
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/MapWriters.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/MapWriters.java b/exec/java-exec/src/main/codegen/templates/MapWriters.java
new file mode 100644
index 0000000..7aa5a2e
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/MapWriters.java
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+<@pp.dropOutputFile />
+<#list ["Single", "Repeated"] as mode>
+<@pp.changeOutputFile name="/org/apache/drill/exec/vector/complex/impl/${mode}MapWriter.java" />
+<#if mode == "Single">
+<#assign containerClass = "MapVector" />
+<#assign index = "idx()">
+<#else>
+<#assign containerClass = "RepeatedMapVector" />
+<#assign index = "currentChildIndex">
+</#if>
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.impl;
+
+<#include "/@includes/vv_imports.ftl" />
+import java.util.Map;
+
+import org.apache.drill.exec.expr.holders.RepeatedMapHolder;
+import org.apache.drill.exec.vector.AllocationHelper;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+import org.apache.drill.exec.vector.complex.writer.FieldWriter;
+
+import com.google.common.collect.Maps;
+
+@SuppressWarnings("unused")
+public class ${mode}MapWriter extends AbstractFieldWriter{
+
+ protected final ${containerClass} container;
+ private final Map<String, FieldWriter> fields = Maps.newHashMap();
+ <#if mode == "Repeated">private int currentChildIndex = 0;</#if>
+
+ public ${mode}MapWriter(${containerClass} container, FieldWriter parent) {
+ super(parent);
+ this.container = container;
+ }
+
+
+ public MapWriter map(String name){
+ FieldWriter writer = fields.get(name);
+ if(writer == null){
+ int vectorCount = container.size();
+ MapVector vector = container.addOrGet(name, MapVector.TYPE, MapVector.class);
+ writer = new SingleMapWriter(vector, this);
+ if(vectorCount != container.size()) writer.allocate();
+ writer.setPosition(${index});
+ fields.put(name, writer);
+ }
+ return writer;
+ }
+
+ public void allocate(){
+ inform(container.allocateNewSafe());
+ for(FieldWriter w : fields.values()){
+ w.allocate();
+ }
+ }
+
+ public void clear(){
+ container.clear();
+ for(FieldWriter w : fields.values()){
+ w.clear();
+ }
+
+ }
+
+
+ public ListWriter list(String name){
+ FieldWriter writer = fields.get(name);
+ if(writer == null){
+ writer = new SingleListWriter(name, container, this);
+ writer.setPosition(${index});
+ fields.put(name, writer);
+ }
+ return writer;
+ }
+
+
+ <#if mode == "Repeated">
+ public void start(){
+ if(ok()){
+ // update the repeated vector to state that there is current+1 objects.
+
+ RepeatedMapHolder h = new RepeatedMapHolder();
+ container.getAccessor().get(idx(), h);
+ if(h.start >= h.end){
+ container.getMutator().startNewGroup(idx());
+ }
+ currentChildIndex = container.getMutator().add(idx());
+ if(currentChildIndex == -1){
+ inform(false);
+ }else{
+ for(FieldWriter w: fields.values()){
+ w.setPosition(currentChildIndex);
+ }
+ }
+ }
+ }
+
+
+ public void end(){
+ // noop
+ }
+ <#else>
+
+ public void setValueCount(int count){
+ container.getMutator().setValueCount(count);
+ }
+
+ public void setPosition(int index){
+ super.setPosition(index);
+ for(FieldWriter w: fields.values()){
+ w.setPosition(index);
+ }
+ }
+ public void start(){
+ // noop
+ }
+
+ public void end(){
+ // noop
+ }
+ </#if>
+
+ <#list vv.types as type><#list type.minor as minor>
+ <#assign lowerName = minor.class?uncap_first />
+ <#if lowerName == "int" ><#assign lowerName = "integer" /></#if>
+ <#assign upperName = minor.class?upper_case />
+ <#assign capName = minor.class?cap_first />
+ <#assign vectName = capName />
+ <#assign vectName = "Nullable${capName}" />
+
+ private static final MajorType ${upperName}_TYPE = Types.optional(MinorType.${upperName});
+ public ${minor.class}Writer ${lowerName}(String name){
+ FieldWriter writer = fields.get(name);
+ if(writer == null){
+ ${vectName}Vector vector = container.addOrGet(name, ${upperName}_TYPE, ${vectName}Vector.class);
+ AllocationHelper.allocate(vector, 1000, 100, 10);
+ writer = new ${vectName}WriterImpl(vector, this);
+ writer.setPosition(${index});
+ fields.put(name, writer);
+ }
+ return writer;
+ }
+
+
+ </#list></#list>
+
+
+
+}
+</#list>
+
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/NullReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/NullReader.java b/exec/java-exec/src/main/codegen/templates/NullReader.java
new file mode 100644
index 0000000..962eeda
--- /dev/null
+++ b/exec/java-exec/src/main/codegen/templates/NullReader.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+<@pp.dropOutputFile />
+<@pp.changeOutputFile name="/org/apache/drill/exec/vector/complex/impl/NullReader.java" />
+
+
+<#include "/@includes/license.ftl" />
+
+package org.apache.drill.exec.vector.complex.impl;
+
+<#include "/@includes/vv_imports.ftl" />
+
+@SuppressWarnings("unused")
+public class NullReader extends AbstractBaseReader implements FieldReader{
+
+ public static final NullReader INSTANCE = new NullReader();
+
+ private NullReader(){
+ super();
+ }
+
+ <#list vv.types as type><#list type.minor as minor><#assign name = minor.class?cap_first />
+ public void read(Nullable${name}Holder holder){
+ holder.isSet = 0;
+ }
+
+ public void read(int arrayIndex, ${name}Holder holder){
+ throw new ArrayIndexOutOfBoundsException();
+ }
+
+ public void copyAsValue(${minor.class}Writer writer){}
+ public void copyAsField(String name, ${minor.class}Writer writer){}
+
+ public void read(int arrayIndex, Nullable${name}Holder holder){
+ throw new ArrayIndexOutOfBoundsException();
+ }
+ </#list></#list>
+
+ public int size(){
+ return 0;
+ }
+
+ public boolean isSet(){
+ return false;
+ }
+
+ public boolean next(){
+ return false;
+ }
+
+ public RepeatedMapReader map(){
+ return this;
+ }
+
+ public RepeatedListReader list(){
+ return this;
+ }
+
+ public MapReader map(String name){
+ return this;
+ }
+
+ public ListReader list(String name){
+ return this;
+ }
+
+ public FieldReader reader(String name){
+ return this;
+ }
+
+ public FieldReader reader(){
+ return this;
+ }
+
+ private void fail(String name){
+ throw new IllegalArgumentException(String.format("You tried to read a %s type when you are using a ValueReader of type %s.", name, this.getClass().getSimpleName()));
+ }
+
+ <#list ["Object", "BigDecimal", "Integer", "Long", "Boolean",
+ "Character", "DateTime", "Period", "Double", "Float",
+ "Text", "String", "Byte", "Short", "byte[]"] as friendlyType>
+ <#assign safeType=friendlyType />
+ <#if safeType=="byte[]"><#assign safeType="ByteArray" /></#if>
+
+ public ${friendlyType} read${safeType}(int arrayIndex){
+ return null;
+ }
+
+ public ${friendlyType} read${safeType}(){
+ return null;
+ }
+ </#list>
+
+}
+
+
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java b/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
index 6839b37..403ba4f 100644
--- a/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
+++ b/exec/java-exec/src/main/codegen/templates/NullableValueVectors.java
@@ -26,6 +26,8 @@ import java.lang.UnsupportedOperationException;
<#assign className = "Nullable${minor.class}Vector" />
<#assign valuesName = "${minor.class}Vector" />
+<#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) />
+
<@pp.changeOutputFile name="/org/apache/drill/exec/vector/${className}.java" />
<#include "/@includes/license.ftl" />
@@ -53,7 +55,7 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
public ${className}(MaterializedField field, BufferAllocator allocator) {
super(field, allocator);
this.bits = new BitVector(null, allocator);
- this.values = new ${minor.class}Vector(null, allocator);
+ this.values = new ${minor.class}Vector(field, allocator);
}
public int getValueCapacity(){
@@ -84,21 +86,27 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
<#if type.major == "VarLen">
@Override
- public FieldMetadata getMetadata() {
- return FieldMetadata.newBuilder()
- .setDef(getField().getDef())
+ public SerializedField getMetadata() {
+ return getMetadataBuilder()
.setValueCount(valueCount)
.setVarByteLength(values.getVarByteLength())
.setBufferLength(getBufferSize())
.build();
}
- @Override
public void allocateNew() {
- values.allocateNew();
- bits.allocateNew();
+ if(!allocateNewSafe()){
+ throw new OutOfMemoryRuntimeException("Failure while allocating buffer.");
+ }
+ }
+
+ @Override
+ public boolean allocateNewSafe() {
+ if(!values.allocateNewSafe()) return false;
+ if(!bits.allocateNewSafe()) return false;
mutator.reset();
accessor.reset();
+ return true;
}
@Override
@@ -123,8 +131,8 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
}
@Override
- public void load(FieldMetadata metadata, ByteBuf buffer) {
- assert this.field.getDef().equals(metadata.getDef());
+ public void load(SerializedField metadata, ByteBuf buffer) {
+ assert this.field.matches(metadata);
int loaded = load(metadata.getBufferLength(), metadata.getValueCount(), buffer);
assert metadata.getBufferLength() == loaded;
}
@@ -136,9 +144,8 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
<#else>
@Override
- public FieldMetadata getMetadata() {
- return FieldMetadata.newBuilder()
- .setDef(getField().getDef())
+ public SerializedField getMetadata() {
+ return getMetadataBuilder()
.setValueCount(valueCount)
.setBufferLength(getBufferSize())
.build();
@@ -151,6 +158,16 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
mutator.reset();
accessor.reset();
}
+
+
+ @Override
+ public boolean allocateNewSafe() {
+ if(!values.allocateNewSafe()) return false;
+ if(!bits.allocateNewSafe()) return false;
+ mutator.reset();
+ accessor.reset();
+ return true;
+ }
@Override
public void allocateNew(int valueCount) {
@@ -173,8 +190,8 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
}
@Override
- public void load(FieldMetadata metadata, ByteBuf buffer) {
- assert this.field.getDef().equals(metadata.getDef());
+ public void load(SerializedField metadata, ByteBuf buffer) {
+ assert this.field.matches(metadata);
int loaded = load(metadata.getValueCount(), buffer);
assert metadata.getBufferLength() == loaded;
}
@@ -235,8 +252,8 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
}
@Override
- public void copyValue(int fromIndex, int toIndex) {
- to.copyFrom(fromIndex, toIndex, Nullable${minor.class}Vector.this);
+ public boolean copyValueSafe(int fromIndex, int toIndex) {
+ return to.copyFromSafe(fromIndex, toIndex, Nullable${minor.class}Vector.this);
}
}
@@ -261,7 +278,19 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
protected void copyFrom(int fromIndex, int thisIndex, Nullable${minor.class}Vector from){
if (!from.getAccessor().isNull(fromIndex)) {
mutator.set(thisIndex, from.getAccessor().get(fromIndex));
-}
+ }
+ }
+
+ public boolean copyFromSafe(int fromIndex, int thisIndex, ${minor.class}Vector from){
+ boolean success = values.copyFromSafe(fromIndex, thisIndex, from);
+ bits.getMutator().set(thisIndex, 1);
+
+ <#if type.major == "VarLen">
+ if (success) {
+ mutator.lastSet = thisIndex;
+ }
+ </#if>
+ return success;
}
public boolean copyFromSafe(int fromIndex, int thisIndex, Nullable${minor.class}Vector from){
@@ -277,6 +306,11 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
public final class Accessor implements ValueVector.Accessor{
+ final FieldReader reader = new Nullable${minor.class}ReaderImpl(Nullable${minor.class}Vector.this);
+
+ public FieldReader getReader(){
+ return reader;
+ }
/**
* Get the element at the specified position.
*
@@ -308,22 +342,12 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
}
@Override
- public Object getObject(int index) {
-
+ public ${friendlyType} getObject(int index) {
if (isNull(index)) {
return null;
+ }else{
+ return values.getAccessor().getObject(index);
}
- <#if minor.class == "Decimal9" || minor.class == "Decimal18">
- // Get the value and construct a BigDecimal Object
- BigInteger value = BigInteger.valueOf(((${type.boxedType})values.getAccessor().get(index)).${type.javaType}Value());
- return new BigDecimal(value, getField().getScale());
- <#elseif minor.class == "Decimal38Sparse" || minor.class == "Decimal28Sparse">
- return org.apache.drill.common.util.DecimalUtility.getBigDecimalFromSparse(values.getData(), index * ${type.width}, ${minor.nDecimalDigits}, getField().getScale());
- <#elseif minor.class == "Decimal38Dense" || minor.class == "Decimal28Dense">
- return org.apache.drill.common.util.DecimalUtility.getBigDecimalFromDense(values.getData(), index * ${type.width}, ${minor.nDecimalDigits}, getField().getScale(), ${minor.maxPrecisionDigits}, ${type.width});
- <#else>
- return values.getAccessor().getObject(index);
- </#if>
}
public int getValueCount(){
@@ -372,7 +396,7 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
throw new UnsupportedOperationException();
<#else>
for (int i = lastSet + 1; i < index; i++) {
- values.getMutator().set(i, new byte[]{});
+ if(!values.getMutator().setSafe(i, new byte[]{})) return false;
}
boolean b1 = bits.getMutator().setSafe(index, 1);
boolean b2 = values.getMutator().setSafe(index, value, start, length);
@@ -422,7 +446,7 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
<#if type.major == "VarLen">
for (int i = lastSet + 1; i < index; i++) {
- values.getMutator().set(i, new byte[]{});
+ if(!values.getMutator().setSafe(i, new byte[]{})) return false;
}
</#if>
boolean b1 = bits.getMutator().setSafe(index, 1);
@@ -437,6 +461,25 @@ public final class ${className} extends BaseValueVector implements <#if type.maj
}
+ public boolean setSafe(int index, ${minor.class}Holder value) {
+
+ <#if type.major == "VarLen">
+ for (int i = lastSet + 1; i < index; i++) {
+ if(!values.getMutator().setSafe(i, new byte[]{})) return false;
+ }
+ </#if>
+ boolean b1 = bits.getMutator().setSafe(index, 1);
+ boolean b2 = values.getMutator().setSafe(index, value);
+ if(b1 && b2){
+ setCount++;
+ <#if type.major == "VarLen">lastSet = index;</#if>
+ return true;
+ }else{
+ return false;
+ }
+
+ }
+
<#if !(type.major == "VarLen" || minor.class == "Decimal28Sparse" || minor.class == "Decimal38Sparse" || minor.class == "Decimal28Dense" || minor.class == "Decimal38Dense" || minor.class == "TimeStampTZ" || minor.class == "Interval" || minor.class == "IntervalDay")>
public boolean setSafe(int index, ${minor.javaType!type.javaType} value) {
<#if type.major == "VarLen">
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/ObjectInspectorHelper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/ObjectInspectorHelper.java b/exec/java-exec/src/main/codegen/templates/ObjectInspectorHelper.java
index 3c72a96..23b969d 100644
--- a/exec/java-exec/src/main/codegen/templates/ObjectInspectorHelper.java
+++ b/exec/java-exec/src/main/codegen/templates/ObjectInspectorHelper.java
@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
+<@pp.dropOutputFile />
<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/hive/ObjectInspectorHelper.java" />
<#include "/@includes/license.ftl" />
[09/10] Add support for RepeatedMapVector,
MapVector and RepeatedListVector.
Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/ObjectInspectors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/ObjectInspectors.java b/exec/java-exec/src/main/codegen/templates/ObjectInspectors.java
index cf96cbc..9a8c837 100644
--- a/exec/java-exec/src/main/codegen/templates/ObjectInspectors.java
+++ b/exec/java-exec/src/main/codegen/templates/ObjectInspectors.java
@@ -15,9 +15,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-<#list drillOI.map as entry>
-
+<@pp.dropOutputFile />
+<#list drillOI.map as entry>
<@pp.changeOutputFile name="/org/apache/drill/exec/expr/fn/impl/hive/Drill${entry.holder}ObjectInspector.java" />
<#include "/@includes/license.ftl" />
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/RecordValueAccessor.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/RecordValueAccessor.java b/exec/java-exec/src/main/codegen/templates/RecordValueAccessor.java
index c7f178c..d4c6817 100644
--- a/exec/java-exec/src/main/codegen/templates/RecordValueAccessor.java
+++ b/exec/java-exec/src/main/codegen/templates/RecordValueAccessor.java
@@ -53,7 +53,7 @@ public class RecordValueAccessor {
int fieldId = 0;
for (MaterializedField field : schema) {
Class<?> vectorClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getType().getMode());
- vectors[fieldId] = batch.getValueAccessorById(fieldId, vectorClass).getValueVector();
+ vectors[fieldId] = batch.getValueAccessorById(vectorClass, fieldId).getValueVector();
fieldId++;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/RepeatedValueVectors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/RepeatedValueVectors.java b/exec/java-exec/src/main/codegen/templates/RepeatedValueVectors.java
index 7d10438..537c7fd 100644
--- a/exec/java-exec/src/main/codegen/templates/RepeatedValueVectors.java
+++ b/exec/java-exec/src/main/codegen/templates/RepeatedValueVectors.java
@@ -18,9 +18,14 @@
import java.lang.Override;
+import org.apache.drill.exec.vector.UInt4Vector;
+import org.mortbay.jetty.servlet.Holder;
+
<@pp.dropOutputFile />
<#list vv.types as type>
<#list type.minor as minor>
+<#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) />
+
<@pp.changeOutputFile name="/org/apache/drill/exec/vector/Repeated${minor.class}Vector.java" />
<#include "/@includes/license.ftl" />
@@ -120,11 +125,13 @@ package org.apache.drill.exec.vector;
}
@Override
- public void copyValue(int fromIndex, int toIndex) {
- to.copyFrom(fromIndex, toIndex, Repeated${minor.class}Vector.this);
+ public boolean copyValueSafe(int fromIndex, int toIndex) {
+ return to.copyFromSafe(fromIndex, toIndex, Repeated${minor.class}Vector.this);
}
}
+
+
<#if type.major == "VarLen">
public void copyFrom(int inIndex, int outIndex, Repeated${minor.class}Vector v){
int count = v.getAccessor().getCount(inIndex);
@@ -136,7 +143,7 @@ package org.apache.drill.exec.vector;
public boolean copyFromSafe(int inIndex, int outIndex, Repeated${minor.class}Vector v){
int count = v.getAccessor().getCount(inIndex);
- getMutator().startNewGroup(outIndex);
+ if(!getMutator().startNewGroup(outIndex)) return false;
for (int i = 0; i < count; i++) {
if (!getMutator().addSafe(outIndex, v.getAccessor().get(inIndex, i))) {
return false;
@@ -155,18 +162,15 @@ package org.apache.drill.exec.vector;
}
</#if>
- <#if type.major == "VarLen">
- @Override
- public FieldMetadata getMetadata() {
- return FieldMetadata.newBuilder()
- .setDef(getField().getDef())
- .setGroupCount(this.parentValueCount)
- .setValueCount(this.childValueCount)
- .setVarByteLength(values.getVarByteLength())
- .setBufferLength(getBufferSize())
- .build();
+ public boolean allocateNewSafe(){
+ if(!offsets.allocateNewSafe()) return false;
+ if(!values.allocateNewSafe()) return false;
+ mutator.reset();
+ accessor.reset();
+ sliceOffset = 0;
+ return true;
}
-
+
public void allocateNew() {
offsets.allocateNew();
values.allocateNew();
@@ -174,6 +178,17 @@ package org.apache.drill.exec.vector;
accessor.reset();
sliceOffset = 0;
}
+
+ <#if type.major == "VarLen">
+ @Override
+ public SerializedField getMetadata() {
+ return getMetadataBuilder() //
+ .setGroupCount(this.parentValueCount) //
+ .setValueCount(this.childValueCount) //
+ .setVarByteLength(values.getVarByteLength()) //
+ .setBufferLength(getBufferSize()) //
+ .build();
+ }
public void allocateNew(int totalBytes, int parentValueCount, int childValueCount) {
offsets.allocateNew(parentValueCount+1);
@@ -196,8 +211,8 @@ package org.apache.drill.exec.vector;
}
@Override
- public void load(FieldMetadata metadata, ByteBuf buffer) {
- assert this.field.getDef().equals(metadata.getDef());
+ public void load(SerializedField metadata, ByteBuf buffer) {
+ assert this.field.matches(metadata);
int loaded = load(metadata.getVarByteLength(), metadata.getGroupCount(), metadata.getValueCount(), buffer);
assert metadata.getBufferLength() == loaded;
}
@@ -209,23 +224,13 @@ package org.apache.drill.exec.vector;
<#else>
@Override
- public FieldMetadata getMetadata() {
- return FieldMetadata.newBuilder()
- .setDef(getField().getDef())
+ public SerializedField getMetadata() {
+ return getMetadataBuilder()
.setGroupCount(this.parentValueCount)
.setValueCount(this.childValueCount)
.setBufferLength(getBufferSize())
.build();
}
-
- @Override
- public void allocateNew() {
- clear();
- offsets.allocateNew();
- values.allocateNew();
- mutator.reset();
- accessor.reset();
- }
public void allocateNew(int parentValueCount, int childValueCount) {
clear();
@@ -246,8 +251,8 @@ package org.apache.drill.exec.vector;
}
@Override
- public void load(FieldMetadata metadata, ByteBuf buffer) {
- assert this.field.getDef().equals(metadata.getDef());
+ public void load(SerializedField metadata, ByteBuf buffer) {
+ assert this.field.matches(metadata);
int loaded = load(metadata.getGroupCount(), metadata.getValueCount(), buffer);
assert metadata.getBufferLength() == loaded;
}
@@ -276,6 +281,13 @@ package org.apache.drill.exec.vector;
}
public final class Accessor implements ValueVector.Accessor{
+
+ final FieldReader reader = new Repeated${minor.class}ReaderImpl(Repeated${minor.class}Vector.this);
+
+ public FieldReader getReader(){
+ return reader;
+ }
+
/**
* Get the elements at the given index.
*/
@@ -283,8 +295,8 @@ package org.apache.drill.exec.vector;
return offsets.getAccessor().get(index+1) - offsets.getAccessor().get(index);
}
- public Object getObject(int index) {
- List<Object> vals = Lists.newArrayList();
+ public List<${friendlyType}> getObject(int index) {
+ List<${friendlyType}> vals = Lists.newArrayList();
int start = offsets.getAccessor().get(index) - sliceOffset;
int end = offsets.getAccessor().get(index+1) - sliceOffset;
for(int i = start; i < end; i++){
@@ -292,6 +304,11 @@ package org.apache.drill.exec.vector;
}
return vals;
}
+
+ public ${friendlyType} getSingleObject(int index, int arrayIndex){
+ int start = offsets.getAccessor().get(index);
+ return values.getAccessor().getObject(start + arrayIndex);
+ }
/**
* Get a value for the given record. Each element in the repeated field is accessed by
@@ -323,6 +340,12 @@ package org.apache.drill.exec.vector;
assert offset >= 0;
values.getAccessor().get(offset + positionIndex, holder);
}
+
+ public void get(int index, int positionIndex, Nullable${minor.class}Holder holder) {
+ int offset = offsets.getAccessor().get(index);
+ assert offset >= 0;
+ values.getAccessor().get(offset + positionIndex, holder);
+ }
public MaterializedField getField() {
return field;
@@ -347,8 +370,12 @@ package org.apache.drill.exec.vector;
private Mutator(){
}
- public void startNewGroup(int index) {
+ public boolean startNewGroup(int index) {
+ if(getValueCapacity() <= index){
+ return false;
+ }
offsets.getMutator().set(index+1, offsets.getAccessor().get(index));
+ return true;
}
/**
@@ -370,14 +397,43 @@ package org.apache.drill.exec.vector;
}
public boolean addSafe(int index, byte[] bytes, int start, int length) {
+ if(offsets.getValueCapacity() <= index+1) return false;
int nextOffset = offsets.getAccessor().get(index+1);
boolean b1 = values.getMutator().setSafe(nextOffset, bytes, start, length);
boolean b2 = offsets.getMutator().setSafe(index+1, nextOffset+1);
return (b1 && b2);
}
+
+
</#if>
- public void add(int index, ${minor.class}Holder holder){
+ public boolean setSafe(int index, Repeated${minor.class}Holder h){
+ ${minor.class}Holder ih = new ${minor.class}Holder();
+ getMutator().startNewGroup(index);
+ for(int i = h.start; i < h.end; i++){
+ h.vector.getAccessor().get(i, ih);
+ if(!getMutator().addSafe(index, ih) ) return false;
+ }
+ return true;
+ }
+
+ public boolean addSafe(int index, ${minor.class}Holder holder){
+ if(offsets.getValueCapacity() <= index+1) return false;
+ int nextOffset = offsets.getAccessor().get(index+1);
+ boolean b1 = values.getMutator().setSafe(nextOffset, holder);
+ boolean b2 = offsets.getMutator().setSafe(index+1, nextOffset+1);
+ return (b1 && b2);
+ }
+
+ public boolean addSafe(int index, Nullable${minor.class}Holder holder){
+ if(offsets.getValueCapacity() <= index+1) return false;
+ int nextOffset = offsets.getAccessor().get(index+1);
+ boolean b1 = values.getMutator().setSafe(nextOffset, holder);
+ boolean b2 = offsets.getMutator().setSafe(index+1, nextOffset+1);
+ return (b1 && b2);
+ }
+
+ protected void add(int index, ${minor.class}Holder holder){
int nextOffset = offsets.getAccessor().get(index+1);
values.getMutator().set(nextOffset, holder);
offsets.getMutator().set(index+1, nextOffset+1);
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/SqlAccessors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/SqlAccessors.java b/exec/java-exec/src/main/codegen/templates/SqlAccessors.java
index 0e6b4a7..74ee9ed 100644
--- a/exec/java-exec/src/main/codegen/templates/SqlAccessors.java
+++ b/exec/java-exec/src/main/codegen/templates/SqlAccessors.java
@@ -100,7 +100,7 @@ public class ${name}Accessor extends AbstractSqlAccessor{
<#if minor.class == "TimeStampTZ">
@Override
public Timestamp getTimestamp(int index) {
- return (Timestamp) (ac.getObject(index));
+ return new Timestamp(ac.getObject(index).getMillis());
}
<#elseif minor.class == "Interval" || minor.class == "IntervalDay">
@Override
@@ -110,7 +110,7 @@ public class ${name}Accessor extends AbstractSqlAccessor{
<#elseif minor.class.startsWith("Decimal")>
@Override
public BigDecimal getBigDecimal(int index) {
- return (BigDecimal) ac.getObject(index);
+ return ac.getObject(index);
}
<#else>
@Override
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/TypeHelper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/TypeHelper.java b/exec/java-exec/src/main/codegen/templates/TypeHelper.java
index f17df04..4755e92 100644
--- a/exec/java-exec/src/main/codegen/templates/TypeHelper.java
+++ b/exec/java-exec/src/main/codegen/templates/TypeHelper.java
@@ -23,7 +23,11 @@
package org.apache.drill.exec.expr;
<#include "/@includes/vv_imports.ftl" />
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.vector.accessor.*;
+import org.apache.drill.exec.vector.complex.RepeatedMapVector;
public class TypeHelper {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TypeHelper.class);
@@ -66,8 +70,29 @@ public class TypeHelper {
throw new UnsupportedOperationException();
}
- public static Class<? extends ValueVector> getValueVectorClass(MinorType type, DataMode mode){
+ public static ValueVector getNewVector(SchemaPath parentPath, String name, BufferAllocator allocator, MajorType type){
+ SchemaPath child = parentPath.getChild(name);
+ MaterializedField field = MaterializedField.create(child, type);
+ return getNewVector(field, allocator);
+ }
+
+
+ public static Class<?> getValueVectorClass(MinorType type, DataMode mode){
switch (type) {
+ case MAP:
+ switch (mode) {
+ case REQUIRED:
+ return MapVector.class;
+ case REPEATED:
+ return RepeatedMapVector.class;
+ }
+
+ case LIST:
+ switch (mode) {
+ case REPEATED:
+ return RepeatedListVector.class;
+ }
+
<#list vv.types as type>
<#list type.minor as minor>
case ${minor.class?upper_case}:
@@ -86,9 +111,99 @@ public class TypeHelper {
}
throw new UnsupportedOperationException();
}
-
+ public static Class<?> getReaderClassName( MinorType type, DataMode mode){
+ switch (type) {
+ case MAP:
+ switch (mode) {
+ case REQUIRED:
+ return SingleMapReaderImpl.class;
+ case REPEATED:
+ return RepeatedMapReaderImpl.class;
+ }
+ case LIST:
+ switch (mode) {
+ case REQUIRED:
+ return SingleListReaderImpl.class;
+ case REPEATED:
+ return RepeatedListReaderImpl.class;
+ }
+
+<#list vv.types as type>
+ <#list type.minor as minor>
+ case ${minor.class?upper_case}:
+ switch (mode) {
+ case REQUIRED:
+ return ${minor.class}ReaderImpl.class;
+ case OPTIONAL:
+ return Nullable${minor.class}ReaderImpl.class;
+ case REPEATED:
+ return Repeated${minor.class}ReaderImpl.class;
+ }
+ </#list>
+</#list>
+ default:
+ break;
+ }
+ throw new UnsupportedOperationException();
+ }
+
+ public static Class<?> getWriterInterface( MinorType type, DataMode mode){
+ switch (type) {
+ case MAP: return MapWriter.class;
+ case LIST: return ListWriter.class;
+<#list vv.types as type>
+ <#list type.minor as minor>
+ case ${minor.class?upper_case}: return ${minor.class}Writer.class;
+ </#list>
+</#list>
+ default:
+ break;
+ }
+ throw new UnsupportedOperationException();
+ }
+
+ public static Class<?> getWriterImpl( MinorType type, DataMode mode){
+ switch (type) {
+ case MAP:
+ switch (mode) {
+ case REQUIRED:
+ return SingleMapWriter.class;
+ case REPEATED:
+ return RepeatedMapWriter.class;
+ }
+ case LIST:
+ switch (mode) {
+ case REQUIRED:
+ return SingleListWriter.class;
+ case REPEATED:
+ return RepeatedListWriter.class;
+ }
+
+<#list vv.types as type>
+ <#list type.minor as minor>
+ case ${minor.class?upper_case}:
+ switch (mode) {
+ case REQUIRED:
+ return ${minor.class}WriterImpl.class;
+ case OPTIONAL:
+ return Nullable${minor.class}WriterImpl.class;
+ case REPEATED:
+ return Repeated${minor.class}WriterImpl.class;
+ }
+ </#list>
+</#list>
+ default:
+ break;
+ }
+ throw new UnsupportedOperationException();
+ }
+
public static JType getHolderType(JCodeModel model, MinorType type, DataMode mode){
switch (type) {
+ case MAP:
+ case LIST:
+ return model._ref(ComplexHolder.class);
+
<#list vv.types as type>
<#list type.minor as minor>
case ${minor.class?upper_case}:
@@ -112,6 +227,20 @@ public class TypeHelper {
MajorType type = field.getType();
switch (type.getMinorType()) {
+
+
+ case MAP:
+ switch (type.getMode()) {
+ case REQUIRED:
+ return new MapVector(field, allocator);
+ case REPEATED:
+ return new RepeatedMapVector(field, allocator);
+ }
+ case LIST:
+ switch (type.getMode()) {
+ case REPEATED:
+ return new RepeatedListVector(field, allocator);
+ }
<#list vv. types as type>
<#list type.minor as minor>
case ${minor.class?upper_case}:
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/codegen/templates/VariableLengthVectors.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/VariableLengthVectors.java b/exec/java-exec/src/main/codegen/templates/VariableLengthVectors.java
index 6b05ec5..c2effbd 100644
--- a/exec/java-exec/src/main/codegen/templates/VariableLengthVectors.java
+++ b/exec/java-exec/src/main/codegen/templates/VariableLengthVectors.java
@@ -19,6 +19,9 @@
<#list vv.types as type>
<#list type.minor as minor>
+<#assign friendlyType = (minor.friendlyType!minor.boxedType!type.boxedType) />
+
+
<#if type.major == "VarLen">
<@pp.changeOutputFile name="/org/apache/drill/exec/vector/${minor.class}Vector.java" />
@@ -81,12 +84,11 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements V
}
@Override
- public FieldMetadata getMetadata() {
- return FieldMetadata.newBuilder()
- .setDef(getField().getDef())
- .setValueCount(valueCount)
- .setVarByteLength(getVarByteLength())
- .setBufferLength(getBufferSize())
+ public SerializedField getMetadata() {
+ return getMetadataBuilder() //
+ .setValueCount(valueCount) //
+ .setVarByteLength(getVarByteLength()) //
+ .setBufferLength(getBufferSize()) //
.build();
}
@@ -103,8 +105,8 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements V
}
@Override
- public void load(FieldMetadata metadata, ByteBuf buffer) {
- assert this.field.getDef().equals(metadata.getDef());
+ public void load(SerializedField metadata, ByteBuf buffer) {
+ assert this.field.matches(metadata);
int loaded = load(metadata.getBufferLength(), metadata.getValueCount(), buffer);
assert metadata.getBufferLength() == loaded;
}
@@ -206,13 +208,19 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements V
}
@Override
- public void copyValue(int fromIndex, int toIndex) {
- to.copyFrom(fromIndex, toIndex, ${minor.class}Vector.this);
+ public boolean copyValueSafe(int fromIndex, int toIndex) {
+ return to.copyFromSafe(fromIndex, toIndex, ${minor.class}Vector.this);
}
}
- @Override
public void allocateNew() {
+ if(!allocateNewSafe()){
+ throw new OutOfMemoryRuntimeException("Failure while allocating buffer.");
+ }
+ }
+
+ @Override
+ public boolean allocateNewSafe() {
clear();
if (allocationMonitor > 5) {
allocationTotalByteCount = Math.max(1, (int) (allocationTotalByteCount * 0.9));
@@ -222,9 +230,16 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements V
allocationMonitor = 0;
}
data = allocator.buffer(allocationTotalByteCount);
+ if(data == null){
+ return false;
+ }
+
data.readerIndex(0);
- offsetVector.allocateNew();
+ if(!offsetVector.allocateNewSafe()){
+ return false;
+ }
offsetVector.getMutator().set(0,0);
+ return true;
}
public void allocateNew(int totalBytes, int valueCount) {
@@ -245,6 +260,11 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements V
}
public final class Accessor extends BaseValueVector.BaseAccessor{
+ final FieldReader reader = new ${minor.class}ReaderImpl(${minor.class}Vector.this);
+
+ public FieldReader getReader(){
+ return reader;
+ }
public byte[] get(int index) {
assert index >= 0;
@@ -262,7 +282,8 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements V
holder.buffer = data;
}
- void get(int index, Nullable${minor.class}Holder holder){
+ public void get(int index, Nullable${minor.class}Holder holder){
+ holder.isSet = 1;
holder.start = offsetVector.getAccessor().get(index);
holder.end = offsetVector.getAccessor().get(index + 1);
holder.buffer = data;
@@ -271,17 +292,19 @@ public final class ${minor.class}Vector extends BaseDataValueVector implements V
<#switch minor.class>
<#case "VarChar">
- public Object getObject(int index) {
- return new String(get(index), Charsets.UTF_8);
+ public ${friendlyType} getObject(int index) {
+ Text text = new Text();
+ text.set(get(index));
+ return text;
}
<#break>
<#case "Var16Char">
- public Object getObject(int index) {
+ public ${friendlyType} getObject(int index) {
return new String(get(index), Charsets.UTF_16);
}
<#break>
<#default>
- public Object getObject(int index) {
+ public ${friendlyType} getObject(int index) {
return get(index);
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java
index f4a6d7d..073a8d5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/VectorAccessibleSerializable.java
@@ -17,26 +17,34 @@
*/
package org.apache.drill.exec.cache;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.codahale.metrics.MetricRegistry;
-import com.codahale.metrics.Timer;
-import com.hazelcast.nio.ObjectDataInput;
-import com.hazelcast.nio.ObjectDataOutput;
import io.netty.buffer.ByteBuf;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.List;
+
import org.apache.drill.common.util.DataInputInputStream;
import org.apache.drill.common.util.DataOutputOutputStream;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.metrics.DrillMetrics;
import org.apache.drill.exec.proto.UserBitShared;
-import org.apache.drill.exec.record.*;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.record.VectorAccessible;
+import org.apache.drill.exec.record.VectorContainer;
+import org.apache.drill.exec.record.WritableBatch;
import org.apache.drill.exec.record.selection.SelectionVector2;
import org.apache.drill.exec.vector.ValueVector;
-import org.apache.drill.exec.proto.UserBitShared.FieldMetadata;
-import java.io.*;
-import java.util.List;
+import com.codahale.metrics.MetricRegistry;
+import com.codahale.metrics.Timer;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.hazelcast.nio.ObjectDataInput;
+import com.hazelcast.nio.ObjectDataOutput;
/**
* A wrapper around a VectorAccessible. Will serialize a VectorAccessible and write to an OutputStream, or can read
@@ -109,10 +117,10 @@ public class VectorAccessibleSerializable implements DrillSerializable {
svMode = BatchSchema.SelectionVectorMode.TWO_BYTE;
}
List<ValueVector> vectorList = Lists.newArrayList();
- List<FieldMetadata> fieldList = batchDef.getFieldList();
- for (FieldMetadata metaData : fieldList) {
+ List<SerializedField> fieldList = batchDef.getFieldList();
+ for (SerializedField metaData : fieldList) {
int dataLength = metaData.getBufferLength();
- MaterializedField field = MaterializedField.create(metaData.getDef());
+ MaterializedField field = MaterializedField.create(metaData);
ByteBuf buf = allocator.buffer(dataLength);
buf.writeBytes(input, dataLength);
ValueVector vector = TypeHelper.getNewVector(field, allocator);
@@ -135,7 +143,7 @@ public class VectorAccessibleSerializable implements DrillSerializable {
retain = true;
writeToStream(output);
}
-
+
/**
* Serializes the VectorAccessible va and writes it to an output stream
@@ -203,7 +211,7 @@ public class VectorAccessibleSerializable implements DrillSerializable {
public VectorAccessible get() {
return va;
}
-
+
public SelectionVector2 getSv2() {
return sv2;
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java
index 2280aec..fc48552 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ClassGenerator.java
@@ -20,7 +20,6 @@ package org.apache.drill.exec.expr;
import static org.apache.drill.exec.compile.sig.GeneratorMapping.GM;
import java.lang.reflect.Modifier;
-import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@@ -41,6 +40,7 @@ import org.apache.drill.exec.record.TypedFieldId;
import com.beust.jcommander.internal.Lists;
import com.beust.jcommander.internal.Maps;
import com.google.common.base.Preconditions;
+import com.sun.codemodel.JArray;
import com.sun.codemodel.JBlock;
import com.sun.codemodel.JClass;
import com.sun.codemodel.JClassAlreadyExistsException;
@@ -49,20 +49,21 @@ import com.sun.codemodel.JDefinedClass;
import com.sun.codemodel.JExpr;
import com.sun.codemodel.JExpression;
import com.sun.codemodel.JFieldRef;
+import com.sun.codemodel.JInvocation;
import com.sun.codemodel.JMethod;
import com.sun.codemodel.JMod;
import com.sun.codemodel.JType;
import com.sun.codemodel.JVar;
public class ClassGenerator<T>{
-
+
public static final GeneratorMapping DEFAULT_SCALAR_MAP = GM("doSetup", "doEval", null, null);
public static final GeneratorMapping DEFAULT_CONSTANT_MAP = GM("doSetup", "doSetup", null, null);
-
-
+
+
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ClassGenerator.class);
public static enum BlockType {SETUP, EVAL, RESET, CLEANUP};
-
+
private final SignatureHolder sig;
private final EvaluationVisitor evaluationVisitor;
private final Map<ValueVectorSetup, JVar> vvDeclaration = Maps.newHashMap();
@@ -74,7 +75,7 @@ public class ClassGenerator<T>{
public final JDefinedClass clazz;
private final LinkedList<JBlock>[] blocks;
private final JCodeModel model;
-
+
private int index = 0;
private MappingSet mappings;
@@ -82,7 +83,7 @@ public class ClassGenerator<T>{
return new MappingSet("inIndex", "outIndex", DEFAULT_CONSTANT_MAP, DEFAULT_SCALAR_MAP);
}
-
+
@SuppressWarnings("unchecked")
ClassGenerator(CodeGenerator<T> codeGenerator, MappingSet mappingSet, SignatureHolder signature, EvaluationVisitor eval, JDefinedClass clazz, JCodeModel model) throws JClassAlreadyExistsException {
this.codeGenerator = codeGenerator;
@@ -96,7 +97,7 @@ public class ClassGenerator<T>{
blocks[i] = Lists.newLinkedList();
}
rotateBlock();
-
+
for(SignatureHolder child : signature.getChildHolders()){
String innerClassName = child.getSignatureClass().getSimpleName();
JDefinedClass innerClazz = clazz._class(Modifier.FINAL + Modifier.PRIVATE, innerClassName);
@@ -109,15 +110,15 @@ public class ClassGenerator<T>{
Preconditions.checkNotNull(inner);
return inner;
}
-
+
public MappingSet getMappingSet(){
return mappings;
}
-
+
public void setMappingSet(MappingSet mappings){
this.mappings = mappings;
}
-
+
public CodeGenerator<T> getCodeGenerator() {
return codeGenerator;
}
@@ -125,17 +126,17 @@ public class ClassGenerator<T>{
private GeneratorMapping getCurrentMapping(){
return mappings.getCurrentMapping();
}
-
+
public JBlock getBlock(String methodName){
JBlock blk = this.blocks[sig.get(methodName)].getLast();
Preconditions.checkNotNull(blk, "Requested method name of %s was not available for signature %s.", methodName, this.sig);
return blk;
}
-
+
public JBlock getBlock(BlockType type){
- return getBlock(getCurrentMapping().getMethodName(type));
+ return getBlock(getCurrentMapping().getMethodName(type));
}
-
+
public JBlock getSetupBlock(){
return getBlock(getCurrentMapping().getMethodName(BlockType.SETUP));
}
@@ -148,17 +149,17 @@ public class ClassGenerator<T>{
public JBlock getCleanupBlock(){
return getBlock(getCurrentMapping().getMethodName(BlockType.CLEANUP));
}
-
+
public JVar declareVectorValueSetupAndMember(String batchName, TypedFieldId fieldId){
return declareVectorValueSetupAndMember( DirectExpression.direct(batchName), fieldId);
}
public JVar declareVectorValueSetupAndMember(DirectExpression batchName, TypedFieldId fieldId){
final ValueVectorSetup setup = new ValueVectorSetup(batchName, fieldId);
- JVar var = this.vvDeclaration.get(setup);
- if(var != null) return var;
-
- Class<?> valueVectorClass = TypeHelper.getValueVectorClass(fieldId.getType().getMinorType(), fieldId.getType().getMode());
+// JVar var = this.vvDeclaration.get(setup);
+// if(var != null) return var;
+
+ Class<?> valueVectorClass = fieldId.getIntermediateClass();
JClass vvClass = model.ref(valueVectorClass);
JClass retClass = vvClass;
String vectorAccess = "getValueVector";
@@ -166,48 +167,56 @@ public class ClassGenerator<T>{
retClass = retClass.array();
vectorAccess = "getValueVectors";
}
-
+
JVar vv = declareClassField("vv", retClass);
JClass t = model.ref(SchemaChangeException.class);
JType objClass = model.ref(Object.class);
JBlock b = getSetupBlock();
+ //JExpr.newArray(model.INT).
+
+ JVar fieldArr = b.decl(model.INT.array(), "fieldIds" + index++, JExpr.newArray(model.INT, fieldId.getFieldIds().length));
+ int[] fieldIndices = fieldId.getFieldIds();
+ for(int i = 0; i < fieldIndices.length; i++){
+ b.assign(fieldArr.component(JExpr.lit(i)), JExpr.lit(fieldIndices[i]));
+ }
+
+ JInvocation invoke = batchName
+ .invoke("getValueAccessorById") //
+ .arg( vvClass.dotclass())
+ .arg(fieldArr);
+
JVar obj = b.decl( //
objClass, //
- getNextVar("tmp"), //
- batchName
- .invoke("getValueAccessorById") //
- .arg(JExpr.lit(fieldId.getFieldId())) //
- .arg( vvClass.dotclass())
- .invoke(vectorAccess)//
- );
-
-
+ getNextVar("tmp"), //
+ invoke.invoke(vectorAccess));
+
+
b._if(obj.eq(JExpr._null()))._then()._throw(JExpr._new(t).arg(JExpr.lit(String.format("Failure while loading vector %s with id: %s.", vv.name(), fieldId.toString()))));
//b.assign(vv, JExpr.cast(retClass, ((JExpression) JExpr.cast(wrapperClass, obj) ).invoke(vectorAccess)));
b.assign(vv, JExpr.cast(retClass, obj ));
vvDeclaration.put(setup, vv);
-
+
return vv;
}
public HoldingContainer addExpr(LogicalExpression ex){
return addExpr(ex, true);
}
-
+
public HoldingContainer addExpr(LogicalExpression ex, boolean rotate){
// logger.debug("Adding next write {}", ex);
if(rotate) rotateBlock();
return evaluationVisitor.addExpr(ex, this);
}
-
+
public void rotateBlock(){
for(LinkedList<JBlock> b : blocks){
b.add(new JBlock(true, true));
}
}
-
-
+
+
void flushCode(){
int i =0;
for(CodeGeneratorMethod method : sig){
@@ -219,19 +228,19 @@ public class ClassGenerator<T>{
m._throws(model.ref(c));
}
m._throws(SchemaChangeException.class);
-
+
for(JBlock b : blocks[i++]){
if(!b.isEmpty()) m.body().add(b);
}
-
+
}
-
+
for(ClassGenerator<T> child : innerClasses.values()){
child.flushCode();
}
}
-
-
+
+
public JCodeModel getModel() {
return model;
}
@@ -239,11 +248,11 @@ public class ClassGenerator<T>{
public String getNextVar() {
return "v" + index++;
}
-
+
public String getNextVar(String prefix){
return prefix + index++;
}
-
+
public JVar declareClassField(String prefix, JType t){
return clazz.field(JMod.NONE, t, prefix + index++);
}
@@ -251,11 +260,11 @@ public class ClassGenerator<T>{
public JVar declareClassField(String prefix, JType t, JExpression init){
return clazz.field(JMod.NONE, t, prefix + index++, init);
}
-
+
public HoldingContainer declare(MajorType t){
return declare(t, true);
}
-
+
public HoldingContainer declare(MajorType t, boolean includeNewInstance){
JType holderType = getHolderType(t);
JVar var;
@@ -266,12 +275,12 @@ public class ClassGenerator<T>{
}
JFieldRef outputSet = null;
if(t.getMode() == DataMode.OPTIONAL){
- outputSet = var.ref("isSet");
+ outputSet = var.ref("isSet");
}
index++;
return new HoldingContainer(t, var, var.ref("value"), outputSet);
}
-
+
public List<TypedFieldId> getWorkspaceTypes() {
return this.workspaceTypes;
}
@@ -283,7 +292,7 @@ public class ClassGenerator<T>{
private static class ValueVectorSetup{
final DirectExpression batch;
final TypedFieldId fieldId;
-
+
public ValueVectorSetup(DirectExpression batch, TypedFieldId fieldId) {
super();
this.batch = batch;
@@ -321,35 +330,45 @@ public class ClassGenerator<T>{
return true;
}
-
+
}
-
-
+
+
public static class HoldingContainer{
private final JVar holder;
private final JFieldRef value;
private final JFieldRef isSet;
private final MajorType type;
private boolean isConstant;
-
+ private final boolean singularRepeated;
+
public HoldingContainer(MajorType t, JVar holder, JFieldRef value, JFieldRef isSet) {
+ this(t, holder, value, isSet, false);
+ }
+
+ public HoldingContainer(MajorType t, JVar holder, JFieldRef value, JFieldRef isSet, boolean singularRepeated) {
super();
this.holder = holder;
this.value = value;
this.isSet = isSet;
this.type = t;
this.isConstant = false;
+ this.singularRepeated = singularRepeated;
}
-
+
+ public boolean isSingularRepeated(){
+ return singularRepeated;
+ }
+
public HoldingContainer setConstant(boolean isConstant) {
this.isConstant = isConstant;
return this;
}
-
+
public boolean isConstant() {
return this.isConstant;
}
-
+
public JVar getHolder() {
return holder;
}
@@ -357,7 +376,7 @@ public class ClassGenerator<T>{
public JFieldRef getValue() {
return value;
}
-
+
public MajorType getMajorType(){
return type;
}
@@ -366,11 +385,11 @@ public class ClassGenerator<T>{
Preconditions.checkNotNull(isSet, "You cannot access the isSet variable when operating on a non-nullable output value.");
return isSet;
}
-
+
public boolean isOptional(){
return type.getMode() == DataMode.OPTIONAL;
}
-
+
public boolean isRepeated(){
return type.getMode() == DataMode.REPEATED;
}
@@ -379,7 +398,7 @@ public class ClassGenerator<T>{
return type.getMinorType();
}
}
-
+
public JType getHolderType(MajorType t){
return TypeHelper.getHolderType(model, t.getMinorType(), t.getMode());
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
index dce070f..d700bf3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java
@@ -20,8 +20,6 @@ package org.apache.drill.exec.expr;
import java.util.List;
import java.util.Set;
-import io.netty.buffer.ByteBuf;
-import com.google.common.collect.Lists;
import org.apache.drill.common.expression.CastExpression;
import org.apache.drill.common.expression.ConvertExpression;
import org.apache.drill.common.expression.FunctionCall;
@@ -29,25 +27,24 @@ import org.apache.drill.common.expression.FunctionHolderExpression;
import org.apache.drill.common.expression.IfExpression;
import org.apache.drill.common.expression.IfExpression.IfCondition;
import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.expression.PathSegment;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.expression.TypedNullConstant;
-import org.apache.drill.common.expression.ValueExpressions;
import org.apache.drill.common.expression.ValueExpressions.BooleanExpression;
-import org.apache.drill.common.expression.ValueExpressions.DoubleExpression;
-import org.apache.drill.common.expression.ValueExpressions.LongExpression;
-import org.apache.drill.common.expression.ValueExpressions.IntExpression;
import org.apache.drill.common.expression.ValueExpressions.DateExpression;
-import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpression;
-import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
-import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
-import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
-import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
import org.apache.drill.common.expression.ValueExpressions.Decimal18Expression;
import org.apache.drill.common.expression.ValueExpressions.Decimal28Expression;
import org.apache.drill.common.expression.ValueExpressions.Decimal38Expression;
+import org.apache.drill.common.expression.ValueExpressions.Decimal9Expression;
+import org.apache.drill.common.expression.ValueExpressions.DoubleExpression;
+import org.apache.drill.common.expression.ValueExpressions.IntExpression;
+import org.apache.drill.common.expression.ValueExpressions.IntervalDayExpression;
+import org.apache.drill.common.expression.ValueExpressions.IntervalYearExpression;
+import org.apache.drill.common.expression.ValueExpressions.LongExpression;
import org.apache.drill.common.expression.ValueExpressions.QuotedString;
+import org.apache.drill.common.expression.ValueExpressions.TimeExpression;
+import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression;
import org.apache.drill.common.expression.visitors.AbstractExprVisitor;
-import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
@@ -60,6 +57,8 @@ import org.apache.drill.exec.expr.fn.HiveFuncHolder;
import org.apache.drill.exec.physical.impl.filter.ReturnValueExpression;
import org.apache.drill.exec.record.NullExpression;
import org.apache.drill.exec.vector.ValueHolderHelper;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+import org.apache.drill.exec.vector.complex.writer.FieldWriter;
import com.google.common.collect.Lists;
import com.sun.codemodel.JBlock;
@@ -68,6 +67,7 @@ import com.sun.codemodel.JConditional;
import com.sun.codemodel.JExpr;
import com.sun.codemodel.JExpression;
import com.sun.codemodel.JInvocation;
+import com.sun.codemodel.JLabel;
import com.sun.codemodel.JType;
import com.sun.codemodel.JVar;
@@ -88,7 +88,7 @@ public class EvaluationVisitor {
private class EvalVisitor extends AbstractExprVisitor<HoldingContainer, ClassGenerator<?>, RuntimeException> {
-
+
@Override
public HoldingContainer visitFunctionCall(FunctionCall call, ClassGenerator<?> generator) throws RuntimeException {
throw new UnsupportedOperationException("FunctionCall is not expected here. "+
@@ -143,18 +143,18 @@ public class EvaluationVisitor {
JConditional jc = null;
JBlock conditionalBlock = new JBlock(false, false);
for (IfCondition c : ifExpr.conditions) {
- HoldingContainer HoldingContainer = c.condition.accept(this, generator);
+ HoldingContainer holdingContainer = c.condition.accept(this, generator);
if (jc == null) {
- if (HoldingContainer.isOptional()) {
- jc = conditionalBlock._if(HoldingContainer.getIsSet().cand(HoldingContainer.getValue()));
+ if (holdingContainer.isOptional()) {
+ jc = conditionalBlock._if(holdingContainer.getIsSet().cand(holdingContainer.getValue()));
} else {
- jc = conditionalBlock._if(HoldingContainer.getValue().eq(JExpr.lit(1)));
+ jc = conditionalBlock._if(holdingContainer.getValue().eq(JExpr.lit(1)));
}
} else {
- if (HoldingContainer.isOptional()) {
- jc = jc._else()._if(HoldingContainer.getIsSet().cand(HoldingContainer.getValue()));
+ if (holdingContainer.isOptional()) {
+ jc = jc._else()._if(holdingContainer.getIsSet().cand(holdingContainer.getValue()));
} else {
- jc = jc._else()._if(HoldingContainer.getValue());
+ jc = jc._else()._if(holdingContainer.getValue());
}
}
@@ -184,14 +184,14 @@ public class EvaluationVisitor {
return output;
}
-
+
@Override
public HoldingContainer visitSchemaPath(SchemaPath path, ClassGenerator<?> generator) throws RuntimeException {
throw new UnsupportedOperationException("All schema paths should have been replaced with ValueVectorExpressions.");
}
@Override
- public HoldingContainer visitLongConstant(LongExpression e, ClassGenerator<?> generator) throws RuntimeException {
+ public HoldingContainer visitLongConstant(LongExpression e, ClassGenerator<?> generator) throws RuntimeException {
HoldingContainer out = generator.declare(e.getMajorType());
generator.getEvalBlock().assign(out.getValue(), JExpr.lit(e.getLong()));
return out;
@@ -269,81 +269,180 @@ public class EvaluationVisitor {
private HoldingContainer visitValueVectorWriteExpression(ValueVectorWriteExpression e, ClassGenerator<?> generator) {
- LogicalExpression child = e.getChild();
- HoldingContainer inputContainer = child.accept(this, generator);
+ final LogicalExpression child = e.getChild();
+ final HoldingContainer inputContainer = child.accept(this, generator);
+ final boolean complex = Types.isComplex(inputContainer.getMajorType());
+
JBlock block = generator.getEvalBlock();
JExpression outIndex = generator.getMappingSet().getValueWriteIndex();
JVar vv = generator.declareVectorValueSetupAndMember(generator.getMappingSet().getOutgoing(), e.getFieldId());
- String setMethod = e.isSafe() ? "setSafe" : "set";
-
- JInvocation setMeth;
- if (Types.usesHolderForGet(inputContainer.getMajorType())) {
- setMeth = vv.invoke("getMutator").invoke(setMethod).arg(outIndex).arg(inputContainer.getHolder());
- }else{
- setMeth = vv.invoke("getMutator").invoke(setMethod).arg(outIndex).arg(inputContainer.getValue());
- }
-
- if(e.isSafe()){
- HoldingContainer outputContainer = generator.declare(Types.REQUIRED_BIT);
- block.assign(outputContainer.getValue(), JExpr.lit(1));
- if(inputContainer.isOptional()){
-// block._if(vv.invoke("getMutator").invoke(setMethod).arg(outIndex).not())._then().assign(outputContainer.getValue(), JExpr.lit(0));
- JConditional jc = block._if(inputContainer.getIsSet().eq(JExpr.lit(0)).not());
- block = jc._then();
+
+ if(complex){
+ JType writerImpl = generator.getModel()._ref(TypeHelper.getWriterImpl(inputContainer.getMinorType(), inputContainer.getMajorType().getMode()));
+ JType writerIFace = generator.getModel()._ref(TypeHelper.getWriterInterface(inputContainer.getMinorType(), inputContainer.getMajorType().getMode()));
+ JVar writer = generator.declareClassField("writer", writerIFace);
+ generator.getSetupBlock().assign(writer, JExpr._new(writerImpl).arg(vv).arg(JExpr._null()));
+ generator.getEvalBlock().add(writer.invoke("setPosition").arg(outIndex));
+ String copyMethod = inputContainer.isSingularRepeated() ? "copyAsValueSingle" : "copyAsValue";
+ generator.getEvalBlock().add(inputContainer.getHolder().invoke(copyMethod).arg(writer));
+ if(e.isSafe()){
+ HoldingContainer outputContainer = generator.declare(Types.REQUIRED_BIT);
+ JConditional ifOut = generator.getEvalBlock()._if(writer.invoke("ok"));
+ ifOut._then().assign(outputContainer.getValue(), JExpr.lit(1));
+ ifOut._else().assign(outputContainer.getValue(), JExpr.lit(0));
+ return outputContainer;
}
- block._if(setMeth.not())._then().assign(outputContainer.getValue(), JExpr.lit(0));
- return outputContainer;
}else{
- if (inputContainer.isOptional()) {
-// block.add(vv.invoke("getMutator").invoke(setMethod).arg(outIndex));
- JConditional jc = block._if(inputContainer.getIsSet().eq(JExpr.lit(0)).not());
- block = jc._then();
+ String setMethod = e.isSafe() ? "setSafe" : "set";
+
+ JInvocation setMeth;
+ if (Types.usesHolderForGet(inputContainer.getMajorType())) {
+ setMeth = vv.invoke("getMutator").invoke(setMethod).arg(outIndex).arg(inputContainer.getHolder());
+ }else{
+ setMeth = vv.invoke("getMutator").invoke(setMethod).arg(outIndex).arg(inputContainer.getValue());
+ }
+
+ if(e.isSafe()){
+ HoldingContainer outputContainer = generator.declare(Types.REQUIRED_BIT);
+ block.assign(outputContainer.getValue(), JExpr.lit(1));
+ if(inputContainer.isOptional()){
+// block._if(vv.invoke("getMutator").invoke(setMethod).arg(outIndex).not())._then().assign(outputContainer.getValue(), JExpr.lit(0));
+ JConditional jc = block._if(inputContainer.getIsSet().eq(JExpr.lit(0)).not());
+ block = jc._then();
+ }
+ block._if(setMeth.not())._then().assign(outputContainer.getValue(), JExpr.lit(0));
+ return outputContainer;
+ }else{
+ if (inputContainer.isOptional()) {
+// block.add(vv.invoke("getMutator").invoke(setMethod).arg(outIndex));
+ JConditional jc = block._if(inputContainer.getIsSet().eq(JExpr.lit(0)).not());
+ block = jc._then();
+ }
+ block.add(setMeth);
}
- block.add(setMeth);
+
}
-
+
+
return null;
}
private HoldingContainer visitValueVectorReadExpression(ValueVectorReadExpression e, ClassGenerator<?> generator)
throws RuntimeException {
// declare value vector
-
- JVar vv1 = generator.declareVectorValueSetupAndMember(generator.getMappingSet().getIncoming(), e.getFieldId());
+
+ JExpression vv1 = generator.declareVectorValueSetupAndMember(generator.getMappingSet().getIncoming(), e.getFieldId());
JExpression indexVariable = generator.getMappingSet().getValueReadIndex();
- JInvocation getValueAccessor = vv1.invoke("getAccessor").invoke("get");
- JInvocation getValueAccessor2 = vv1.invoke("getAccessor");
+ JExpression componentVariable = indexVariable.shrz(JExpr.lit(16));
if (e.isSuperReader()) {
-
- getValueAccessor = ((JExpression) vv1.component(indexVariable.shrz(JExpr.lit(16)))).invoke("getAccessor").invoke("get");
- getValueAccessor2 = ((JExpression) vv1.component(indexVariable.shrz(JExpr.lit(16)))).invoke("getAccessor");
+ vv1 = ((JExpression) vv1.component(componentVariable));
indexVariable = indexVariable.band(JExpr.lit((int) Character.MAX_VALUE));
}
// evaluation work.
HoldingContainer out = generator.declare(e.getMajorType());
+ final boolean primitive = !Types.usesHolderForGet(e.getMajorType());
+ final boolean hasReadPath = e.hasReadPath();
+ final boolean complex = Types.isComplex(e.getMajorType());
- if (out.isOptional()) {
- JBlock blk = generator.getEvalBlock();
- blk.assign(out.getIsSet(), getValueAccessor2.invoke("isSet").arg(indexVariable));
- JConditional jc = blk._if(out.getIsSet().eq(JExpr.lit(1)));
- if (Types.usesHolderForGet(e.getMajorType())) {
- jc._then().add(getValueAccessor.arg(indexVariable).arg(out.getHolder()));
- } else {
- jc._then().assign(out.getValue(), getValueAccessor.arg(indexVariable));
+ int[] fieldIds = e.getFieldId().getFieldIds();
+ for(int i = 1; i < fieldIds.length; i++){
+
+ }
+
+ if(!hasReadPath && !complex){
+
+ JInvocation getValueAccessor = vv1.invoke("getAccessor").invoke("get");
+ JInvocation getValueAccessor2 = vv1.invoke("getAccessor");
+ JBlock eval = new JBlock();
+
+ if(primitive){
+ eval.assign(out.getValue(), getValueAccessor.arg(indexVariable));
+ }else{
+ eval.add(getValueAccessor.arg(indexVariable).arg(out.getHolder()));
}
- } else {
- if (Types.usesHolderForGet(e.getMajorType())) {
- if (e.isArrayElement()) {
- generator.getEvalBlock().add(getValueAccessor.arg(indexVariable).arg(JExpr.lit(e.getIndex())).arg(out.getHolder()));
- } else {
- generator.getEvalBlock().add(getValueAccessor.arg(indexVariable).arg(out.getHolder()));
+
+ if (out.isOptional()) {
+ JBlock blk = generator.getEvalBlock();
+ blk.assign(out.getIsSet(), getValueAccessor2.invoke("isSet").arg(indexVariable));
+ JConditional jc = blk._if(out.getIsSet().eq(JExpr.lit(1)));
+ jc._then().add(eval);
+ }else{
+ generator.getEvalBlock().add(eval);
+ }
+
+ }else{
+ JExpression vector = e.isSuperReader() ? vv1.component(componentVariable) : vv1;
+ JExpression expr = vector.invoke("getAccessor").invoke("getReader");
+
+ JLabel label = generator.getEvalBlock().label("complex");
+ JBlock eval = generator.getEvalBlock().block();
+
+ // position to the correct value.
+ eval.add(expr.invoke("setPosition").arg(indexVariable));
+ PathSegment seg = e.getReadPath();
+ int listNum = 0;
+ boolean lastWasArray = false;
+ while(true){
+ if(seg.isArray()){
+ lastWasArray = true;
+
+ if(seg.isLastPath() && !complex) break;
+
+ JVar list = generator.declareClassField("list", generator.getModel()._ref(FieldReader.class));
+ generator.getSetupBlock().assign(list, expr);
+ expr = list;
+
+ // if this is an array, set a single position for the expression to allow us to read the right data lower down.
+ JVar desiredIndex = eval.decl(generator.getModel().INT, "desiredIndex" + listNum, JExpr.lit(seg.getArraySegment().getIndex()));
+ // start with negative one so that we are at zero after first call to next.
+ JVar currentIndex = eval.decl(generator.getModel().INT, "currentIndex" + listNum, JExpr.lit(-1));
+
+ eval._while( //
+ currentIndex.lt(desiredIndex) //
+ .cand(expr.invoke("next")) ).body().assign(currentIndex, currentIndex.plus(JExpr.lit(1)));
+
+ JBlock ifNoVal = eval._if(desiredIndex.ne(currentIndex))._then().block();
+ if(!complex) ifNoVal.assign(out.getIsSet(), JExpr.lit(0));
+ ifNoVal._break(label);
+
+ listNum++;
+
+ }else{
+ lastWasArray = false;
+ JExpression fieldName = JExpr.lit(seg.getNameSegment().getPath());
+ expr = expr.invoke("reader").arg(fieldName);
+ }
+ seg = seg.getChild();
+
+ // stop once we get to last column or when the segment is an array at the end of the reference.
+ if(seg == null || seg.isLastPath() && seg.isArray()) break;
+ }
+ MajorType secondaryType = e.getFieldId().getSecondaryFinal();
+ JType readerImpl = generator.getModel()._ref(TypeHelper.getReaderClassName(secondaryType.getMinorType(), secondaryType.getMode()));
+ JVar complexReader = generator.declareClassField("reader", readerImpl);
+ generator.getSetupBlock().assign(complexReader, JExpr.cast(readerImpl, expr));
+ expr = complexReader;
+
+ if(complex){
+ HoldingContainer hc = new HoldingContainer(e.getMajorType(), (JVar) expr, null, null, lastWasArray);
+ return hc;
+ //eval.assign(out.getHolder().ref("reader"), expr);
+ }else{
+ if(seg != null){
+ eval.add(expr.invoke("read").arg(JExpr.lit(seg.getArraySegment().getIndex())).arg(out.getHolder()));
+ }else{
+
+ eval.add(expr.invoke("read").arg(out.getHolder()));
}
- } else {
- generator.getEvalBlock().assign(out.getValue(), getValueAccessor.arg(indexVariable));
}
+
}
+
+
+
+
return out;
}
@@ -352,11 +451,11 @@ public class EvaluationVisitor {
// Preconditions.checkArgument(child.getMajorType().equals(Types.REQUIRED_BOOLEAN));
HoldingContainer hc = child.accept(this, generator);
if(e.isReturnTrueOnOne()){
- generator.getEvalBlock()._return(hc.getValue().eq(JExpr.lit(1)));
+ generator.getEvalBlock()._return(hc.getValue().eq(JExpr.lit(1)));
}else{
generator.getEvalBlock()._return(hc.getValue());
}
-
+
return null;
}
@@ -453,6 +552,7 @@ public class EvaluationVisitor {
}
}
+
private class ConstantFilter extends EvalVisitor {
private Set<LogicalExpression> constantBoundaries;
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
index 95d341b..fc7fb6a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ExpressionTreeMaterializer.java
@@ -69,7 +69,7 @@ import com.google.common.collect.Lists;
public class ExpressionTreeMaterializer {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExpressionTreeMaterializer.class);
-
+
private ExpressionTreeMaterializer() {
};
@@ -239,36 +239,12 @@ public class ExpressionTreeMaterializer {
@Override
public LogicalExpression visitSchemaPath(SchemaPath path, FunctionImplementationRegistry value) {
// logger.debug("Visiting schema path {}", path);
- PathSegment seg = path.getRootSegment();
- List<String> segments = Lists.newArrayList();
- segments.add(seg.getNameSegment().getPath().toString());
- boolean isArrayElement = false;
- int index = -1;
- while((seg = seg.getChild()) != null) {
- if (seg.isNamed()) {
- segments.add(seg.getNameSegment().getPath().toString());
- if (seg.isLastPath()) {
- break;
- }
- } else {
- if (!seg.isLastPath()) {
- throw new UnsupportedOperationException("Repeated map type not supported");
- }
- index = seg.getArraySegment().getIndex();
- isArrayElement = true;
- break;
- }
- }
- SchemaPath newPath = SchemaPath.getCompoundPath((String[]) segments.toArray(new String[0]));
- TypedFieldId tfId = batch.getValueVectorId(newPath);
+ TypedFieldId tfId = batch.getValueVectorId(path);
if (tfId == null) {
logger.warn("Unable to find value vector of path {}, returning null instance.", path);
return NullExpression.INSTANCE;
} else {
- ValueVectorReadExpression e = new ValueVectorReadExpression(tfId, index, isArrayElement);
- if (isArrayElement) {
- e.required();
- }
+ ValueVectorReadExpression e = new ValueVectorReadExpression(tfId);
return e;
}
}
@@ -361,15 +337,15 @@ public class ExpressionTreeMaterializer {
@Override
public LogicalExpression visitCastExpression(CastExpression e, FunctionImplementationRegistry value){
-
+
// if the cast is pointless, remove it.
LogicalExpression input = e.getInput().accept(this, value);
MajorType newMajor = e.getMajorType();
MinorType newMinor = input.getMajorType().getMinorType();
-
+
if(castEqual(e.getPosition(), newMajor, input.getMajorType())) return input; // don't do pointless cast.
-
+
if(newMinor == MinorType.LATE || newMinor == MinorType.NULL){
// if the type still isn't fully bound, leave as cast expression.
return new CastExpression(input, e.getMajorType(), e.getPosition());
@@ -391,10 +367,10 @@ public class ExpressionTreeMaterializer {
newArgs.add(new ValueExpressions.LongExpression(type.getScale(), null));
}
FunctionCall fc = new FunctionCall(castFuncWithType, newArgs, e.getPosition());
- return fc.accept(this, value);
+ return fc.accept(this, value);
}
}
-
+
private boolean castEqual(ExpressionPosition pos, MajorType from, MajorType to){
if(!from.getMinorType().equals(to.getMinorType())) return false;
switch(from.getMinorType()){
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ValueVectorReadExpression.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ValueVectorReadExpression.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ValueVectorReadExpression.java
index 4ba503d..6e2809a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ValueVectorReadExpression.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/ValueVectorReadExpression.java
@@ -21,56 +21,43 @@ import java.util.Iterator;
import org.apache.drill.common.expression.ExpressionPosition;
import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.expression.PathSegment;
import org.apache.drill.common.expression.visitors.ExprVisitor;
-import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.record.TypedFieldId;
import com.google.common.collect.Iterators;
-import javax.sound.sampled.FloatControl;
-
public class ValueVectorReadExpression implements LogicalExpression{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ValueVectorReadExpression.class);
- private MajorType type;
private final TypedFieldId fieldId;
- private final boolean superReader;
- private final int index;
- private final boolean isArrayElement;
-
-
- public ValueVectorReadExpression(TypedFieldId tfId, int index, boolean isArrayElement){
- this.type = tfId.getType();
+
+
+ public ValueVectorReadExpression(TypedFieldId tfId){
this.fieldId = tfId;
- this.superReader = tfId.isHyperReader();
- this.index = index;
- this.isArrayElement = isArrayElement;
}
- public void required() {
- type = Types.required(type.getMinorType());
+ public boolean hasReadPath(){
+ return fieldId.hasRemainder();
}
- public boolean isArrayElement() {
- return isArrayElement;
+ public PathSegment getReadPath(){
+ return fieldId.getRemainder();
}
- public ValueVectorReadExpression(TypedFieldId tfId) {
- this(tfId, -1, false);
- }
-
public TypedFieldId getTypedFieldId(){
return fieldId;
}
-
+
public boolean isSuperReader(){
- return superReader;
+ return fieldId.isHyperReader();
}
@Override
public MajorType getMajorType() {
- return type;
+ return fieldId.getFinalType();
}
@Override
@@ -82,10 +69,6 @@ public class ValueVectorReadExpression implements LogicalExpression{
return fieldId;
}
- public int getIndex() {
- return index;
- }
-
@Override
public ExpressionPosition getPosition() {
return ExpressionPosition.UNKNOWN;
@@ -95,6 +78,6 @@ public class ValueVectorReadExpression implements LogicalExpression{
public Iterator<LogicalExpression> iterator() {
return Iterators.emptyIterator();
}
-
-
+
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/HashFunctions.java.orig
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/HashFunctions.java.orig b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/HashFunctions.java.orig
new file mode 100644
index 0000000..7eeb730
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/HashFunctions.java.orig
@@ -0,0 +1,252 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.fn.impl;
+
+import org.apache.drill.exec.expr.DrillSimpleFunc;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate;
+import org.apache.drill.exec.expr.annotations.FunctionTemplate.FunctionScope;
+import org.apache.drill.exec.expr.annotations.Output;
+import org.apache.drill.exec.expr.annotations.Param;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.exec.record.RecordBatch;
+
+public class HashFunctions {
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL )
+ public static class NullableFloatHash implements DrillSimpleFunc {
+
+ @Param NullableFloat4Holder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ if (in.isSet == 0)
+ out.value = 0;
+ else
+ out.value = com.google.common.hash.Hashing.murmur3_128().hashInt(Float.floatToIntBits(in.value)).asInt();
+ }
+ }
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL )
+ public static class FloatHash implements DrillSimpleFunc {
+
+ @Param Float4Holder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ out.value = com.google.common.hash.Hashing.murmur3_128().hashInt(Float.floatToIntBits(in.value)).asInt();
+ }
+ }
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL )
+ public static class NullableDoubleHash implements DrillSimpleFunc {
+
+ @Param NullableFloat8Holder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ if (in.isSet == 0)
+ out.value = 0;
+ else
+ out.value = com.google.common.hash.Hashing.murmur3_128().hashLong(Double.doubleToLongBits(in.value)).asInt();
+ }
+ }
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL )
+ public static class DoubleHash implements DrillSimpleFunc {
+
+ @Param Float8Holder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ out.value = com.google.common.hash.Hashing.murmur3_128().hashLong(Double.doubleToLongBits(in.value)).asInt();
+ }
+ }
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL )
+ public static class NullableVarBinaryHash implements DrillSimpleFunc {
+
+ @Param NullableVarBinaryHolder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ if (in.isSet == 0)
+ out.value = 0;
+ else
+ out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash(in.buffer.nioBuffer(in.start, in.end - in.start), 0);
+ }
+ }
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL )
+ public static class NullableVarCharHash implements DrillSimpleFunc {
+
+ @Param NullableVarCharHolder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ if (in.isSet == 0)
+ out.value = 0;
+ else
+ out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash(in.buffer.nioBuffer(in.start, in.end - in.start), 0);
+ }
+ }
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL )
+ public static class NullableVar16CharHash implements DrillSimpleFunc {
+
+ @Param NullableVar16CharHolder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ if (in.isSet == 0)
+ out.value = 0;
+ else
+ out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash(in.buffer.nioBuffer(in.start, in.end - in.start), 0);
+ }
+ }
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
+ public static class NullableBigIntHash implements DrillSimpleFunc {
+
+ @Param NullableBigIntHolder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ // TODO: implement hash function for other types
+ if (in.isSet == 0)
+ out.value = 0;
+ else
+ out.value = com.google.common.hash.Hashing.murmur3_128().hashLong(in.value).asInt();
+ }
+ }
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
+ public static class NullableIntHash implements DrillSimpleFunc {
+ @Param NullableIntHolder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ // TODO: implement hash function for other types
+ if (in.isSet == 0)
+ out.value = 0;
+ else
+ out.value = com.google.common.hash.Hashing.murmur3_128().hashInt(in.value).asInt();
+ }
+ }
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
+ public static class VarBinaryHash implements DrillSimpleFunc {
+
+ @Param VarBinaryHolder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash(in.buffer.nioBuffer(in.start, in.end - in.start), 0);
+ }
+ }
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
+ public static class VarCharHash implements DrillSimpleFunc {
+
+ @Param VarCharHolder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash(in.buffer.nioBuffer(in.start, in.end - in.start), 0);
+ }
+ }
+
+<<<<<<< HEAD
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
+ public static class Var16CharHash implements DrillSimpleFunc {
+
+ @Param Var16CharHolder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ out.value = org.apache.drill.exec.expr.fn.impl.HashHelper.hash(in.buffer.nioBuffer(in.start, in.end - in.start), 0);
+ }
+ }
+
+=======
+>>>>>>> 450e9e0... Support Complex Types
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
+ public static class HashBigInt implements DrillSimpleFunc {
+
+ @Param BigIntHolder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ // TODO: implement hash function for other types
+ out.value = com.google.common.hash.Hashing.murmur3_128().hashLong(in.value).asInt();
+ }
+ }
+
+ @FunctionTemplate(name = "hash", scope = FunctionScope.SIMPLE, nulls = FunctionTemplate.NullHandling.INTERNAL)
+ public static class IntHash implements DrillSimpleFunc {
+ @Param IntHolder in;
+ @Output IntHolder out;
+
+ public void setup(RecordBatch incoming) {
+ }
+
+ public void eval() {
+ // TODO: implement hash function for other types
+ out.value = com.google.common.hash.Hashing.murmur3_128().hashInt(in.value).asInt();
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/ComplexHolder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/ComplexHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/ComplexHolder.java
new file mode 100644
index 0000000..e1025df
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/ComplexHolder.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.holders;
+
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+
+public class ComplexHolder implements ValueHolder {
+ public FieldReader reader;
+ public int isSet;
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/RepeatedListHolder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/RepeatedListHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/RepeatedListHolder.java
new file mode 100644
index 0000000..09746da
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/RepeatedListHolder.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.holders;
+
+public final class RepeatedListHolder implements ValueHolder{
+ public int start;
+ public int end;
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/RepeatedMapHolder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/RepeatedMapHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/RepeatedMapHolder.java
new file mode 100644
index 0000000..247f75e
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/holders/RepeatedMapHolder.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.expr.holders;
+
+public final class RepeatedMapHolder implements ValueHolder{
+ public int start;
+ public int end;
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/memory/OutOfMemoryRuntimeException.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/memory/OutOfMemoryRuntimeException.java b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/OutOfMemoryRuntimeException.java
new file mode 100644
index 0000000..305eabd
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/memory/OutOfMemoryRuntimeException.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.memory;
+
+import org.apache.drill.common.exceptions.DrillRuntimeException;
+
+public class OutOfMemoryRuntimeException extends DrillRuntimeException{
+
+ public OutOfMemoryRuntimeException() {
+ super();
+
+ }
+
+ public OutOfMemoryRuntimeException(String message, Throwable cause, boolean enableSuppression,
+ boolean writableStackTrace) {
+ super(message, cause, enableSuppression, writableStackTrace);
+
+ }
+
+ public OutOfMemoryRuntimeException(String message, Throwable cause) {
+ super(message, cause);
+
+ }
+
+ public OutOfMemoryRuntimeException(String message) {
+ super(message);
+
+ }
+
+ public OutOfMemoryRuntimeException(Throwable cause) {
+ super(cause);
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
index 73ed723..a49d1a8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
@@ -204,8 +204,8 @@ public class ScanBatch implements RecordBatch {
}
@Override
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz) {
- return container.getValueAccessorById(fieldId, clazz);
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids) {
+ return container.getValueAccessorById(clazz, ids);
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
index e0e7e51..fc8c430 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/PriorityQueueTemplate.java
@@ -60,8 +60,8 @@ public abstract class PriorityQueueTemplate implements PriorityQueue {
BatchSchema schema = container.getSchema();
VectorContainer newContainer = new VectorContainer();
for (MaterializedField field : schema) {
- int id = container.getValueVectorId(field.getAsSchemaPath()).getFieldId();
- newContainer.add(container.getValueAccessorById(id, field.getValueClass()).getValueVectors());
+ int[] ids = container.getValueVectorId(field.getPath()).getFieldIds();
+ newContainer.add(container.getValueAccessorById(field.getValueClass(), ids).getValueVectors());
}
newContainer.buildSchema(BatchSchema.SelectionVectorMode.FOUR_BYTE);
this.hyperBatch = new ExpandableHyperContainer(newContainer);
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
index 2a57aaa..1c1a6d2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
@@ -104,8 +104,8 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
return sv4;
}
-
-
+
+
@Override
public void cleanup() {
if (sv4 != null) {
@@ -127,8 +127,8 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
return IterOutcome.NONE;
}
}
-
-
+
+
try{
outer: while (true) {
Stopwatch watch = new Stopwatch();
@@ -166,7 +166,7 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
throw new UnsupportedOperationException();
}
}
-
+
if (schema == null){
// builder may be null at this point if the first incoming batch is empty
return IterOutcome.NONE;
@@ -181,7 +181,7 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
container.buildSchema(BatchSchema.SelectionVectorMode.FOUR_BYTE);
return IterOutcome.OK_NEW_SCHEMA;
-
+
}catch(SchemaChangeException | ClassTransformationException | IOException ex){
kill();
logger.error("Failure during query", ex);
@@ -239,10 +239,10 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
CodeGenerator<PriorityQueue> cg = CodeGenerator.get(PriorityQueue.TEMPLATE_DEFINITION, context.getFunctionRegistry());
ClassGenerator<PriorityQueue> g = cg.getRoot();
g.setMappingSet(mainMapping);
-
+
for(Ordering od : orderings){
// first, we rewrite the evaluation stack for each side of the comparison.
- ErrorCollector collector = new ErrorCollectorImpl();
+ ErrorCollector collector = new ErrorCollectorImpl();
final LogicalExpression expr = ExpressionTreeMaterializer.materialize(od.getExpr(), batch, collector, context.getFunctionRegistry());
if(collector.hasErrors()) throw new SchemaChangeException("Failure while materializing expression. " + collector.toErrorString());
g.setMappingSet(leftMapping);
@@ -250,26 +250,26 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
g.setMappingSet(rightMapping);
HoldingContainer right = g.addExpr(expr, false);
g.setMappingSet(mainMapping);
-
+
// next we wrap the two comparison sides and add the expression block for the comparison.
LogicalExpression fh = FunctionGenerationHelper.getComparator(left, right, context.getFunctionRegistry());
HoldingContainer out = g.addExpr(fh, false);
JConditional jc = g.getEvalBlock()._if(out.getValue().ne(JExpr.lit(0)));
-
+
if(od.getDirection() == Direction.ASCENDING){
jc._then()._return(out.getValue());
}else{
jc._then()._return(out.getValue().minus());
}
}
-
+
g.getEvalBlock()._return(JExpr.lit(0));
PriorityQueue q = context.getImplementationClass(cg);
q.init(config.getLimit(), context, oContext.getAllocator(), schema.getSelectionVectorMode() == BatchSchema.SelectionVectorMode.TWO_BYTE);
return q;
}
-
+
@Override
public WritableBatch getWritableBatch() {
throw new UnsupportedOperationException("A sort batch is not writable.");
@@ -332,8 +332,8 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
}
@Override
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz) {
- return container.getValueAccessorById(fieldId, clazz);
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids) {
+ return container.getValueAccessorById(clazz, ids);
}
@Override
@@ -355,6 +355,6 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
return container.iterator();
}
}
-
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WireRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WireRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WireRecordBatch.java
index 65669b1..cf3d75e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WireRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/WireRecordBatch.java
@@ -37,7 +37,7 @@ public class WireRecordBatch implements RecordBatch {
private FragmentContext context;
private BatchSchema schema;
-
+
public WireRecordBatch(FragmentContext context, RawFragmentBatchProvider fragProvider) throws OutOfMemoryException {
this.fragProvider = fragProvider;
this.context = context;
@@ -83,17 +83,17 @@ public class WireRecordBatch implements RecordBatch {
public TypedFieldId getValueVectorId(SchemaPath path) {
return batchLoader.getValueVectorId(path);
}
-
+
@Override
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz) {
- return batchLoader.getValueAccessorById(fieldId, clazz);
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids) {
+ return batchLoader.getValueAccessorById(clazz, ids);
}
@Override
public IterOutcome next() {
try{
RawFragmentBatch batch = fragProvider.getNext();
-
+
// skip over empty batches. we do this since these are basically control messages.
while(batch != null && !batch.getHeader().getIsOutOfMemory() && batch.getHeader().getDef().getRecordCount() == 0){
batch = fragProvider.getNext();
@@ -107,7 +107,7 @@ public class WireRecordBatch implements RecordBatch {
if (batch.getHeader().getIsOutOfMemory()) {
return IterOutcome.OUT_OF_MEMORY;
}
-
+
// logger.debug("Next received batch {}", batch);
[07/10] Add support for RepeatedMapVector,
MapVector and RepeatedListVector.
Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java
index 396834c..4ff3708 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/WritableBatch.java
@@ -18,12 +18,14 @@
package org.apache.drill.exec.record;
import io.netty.buffer.ByteBuf;
+import io.netty.buffer.CompositeByteBuf;
import java.util.List;
-import io.netty.buffer.CompositeByteBuf;
-import org.apache.drill.exec.proto.UserBitShared.FieldMetadata;
+
+
import org.apache.drill.exec.proto.UserBitShared.RecordBatchDef;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
import org.apache.drill.exec.vector.ValueVector;
@@ -63,7 +65,7 @@ public class WritableBatch {
Preconditions.checkState(!cleared,
"Attempted to reconstruct a container from a WritableBatch after it had been cleared");
if (buffers.length > 0) { /* If we have ByteBuf's associated with value vectors */
-
+
CompositeByteBuf cbb = new CompositeByteBuf(buffers[0].alloc(), true, buffers.length);
/* Copy data from each buffer into the compound buffer */
@@ -71,8 +73,7 @@ public class WritableBatch {
cbb.addComponent(buf);
}
-
- List<FieldMetadata> fields = def.getFieldList();
+ List<SerializedField> fields = def.getFieldList();
int bufferOffset = 0;
@@ -82,7 +83,7 @@ public class WritableBatch {
int vectorIndex = 0;
for (VectorWrapper<?> vv : container) {
- FieldMetadata fmd = fields.get(vectorIndex);
+ SerializedField fmd = fields.get(vectorIndex);
ValueVector v = vv.getValueVector();
ByteBuf bb = cbb.slice(bufferOffset, fmd.getBufferLength());
// v.load(fmd, cbb.slice(bufferOffset, fmd.getBufferLength()));
@@ -127,7 +128,7 @@ public class WritableBatch {
public static WritableBatch getBatchNoHV(int recordCount, Iterable<ValueVector> vectors, boolean isSV2) {
List<ByteBuf> buffers = Lists.newArrayList();
- List<FieldMetadata> metadata = Lists.newArrayList();
+ List<SerializedField> metadata = Lists.newArrayList();
for (ValueVector vv : vectors) {
metadata.add(vv.getMetadata());
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
index 872052c..04a9768 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONFormatPlugin.java
@@ -44,15 +44,15 @@ public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
public JSONFormatPlugin(String name, DrillbitContext context, DrillFileSystem fs, StoragePluginConfig storageConfig) {
this(name, context, fs, storageConfig, new JSONFormatConfig());
}
-
+
public JSONFormatPlugin(String name, DrillbitContext context, DrillFileSystem fs, StoragePluginConfig config, JSONFormatConfig formatPluginConfig) {
super(name, context, fs, config, formatPluginConfig, true, false, false, false, Lists.newArrayList("json"), "json");
}
-
+
@Override
- public RecordReader getRecordReader(FragmentContext context, FileWork fileWork,
+ public RecordReader getRecordReader(FragmentContext context, FileWork fileWork,
List<SchemaPath> columns) throws ExecutionSetupException {
- return new JSONRecordReader(context, fileWork.getPath(), this.getFileSystem().getUnderlying(), columns);
+ return new JSONRecordReader2(context, fileWork.getPath(), this.getFileSystem().getUnderlying(), columns);
}
@Override
@@ -78,6 +78,6 @@ public class JSONFormatPlugin extends EasyFormatPlugin<JSONFormatConfig> {
return true;
return false;
}
-
+
}
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
deleted file mode 100644
index 1c8539c..0000000
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
+++ /dev/null
@@ -1,532 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.store.easy.json;
-
-import static com.fasterxml.jackson.core.JsonToken.END_ARRAY;
-import static com.fasterxml.jackson.core.JsonToken.END_OBJECT;
-import static com.fasterxml.jackson.core.JsonToken.FIELD_NAME;
-
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.drill.common.exceptions.DrillRuntimeException;
-import org.apache.drill.common.exceptions.ExecutionSetupException;
-import org.apache.drill.common.expression.ExpressionPosition;
-import org.apache.drill.common.expression.FieldReference;
-import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.types.TypeProtos.MajorType;
-import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.common.types.Types;
-import org.apache.drill.exec.exception.SchemaChangeException;
-import org.apache.drill.exec.expr.TypeHelper;
-import org.apache.drill.exec.expr.holders.NullableBitHolder;
-import org.apache.drill.exec.expr.holders.NullableFloat4Holder;
-import org.apache.drill.exec.expr.holders.NullableIntHolder;
-import org.apache.drill.exec.memory.BufferAllocator;
-import org.apache.drill.exec.memory.OutOfMemoryException;
-import org.apache.drill.exec.ops.FragmentContext;
-import org.apache.drill.exec.physical.impl.OutputMutator;
-import org.apache.drill.exec.record.MaterializedField;
-import org.apache.drill.exec.schema.DiffSchema;
-import org.apache.drill.exec.schema.Field;
-import org.apache.drill.exec.schema.NamedField;
-import org.apache.drill.exec.schema.ObjectSchema;
-import org.apache.drill.exec.schema.RecordSchema;
-import org.apache.drill.exec.schema.SchemaIdGenerator;
-import org.apache.drill.exec.schema.json.jackson.JacksonHelper;
-import org.apache.drill.exec.store.RecordReader;
-import org.apache.drill.exec.store.VectorHolder;
-import org.apache.drill.exec.vector.*;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
-import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.JsonToken;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
-public class JSONRecordReader implements RecordReader {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JSONRecordReader.class);
- private static final int DEFAULT_LENGTH = 4000;
- public static final Charset UTF_8 = Charset.forName("UTF-8");
-
- private final Map<String, VectorHolder> valueVectorMap;
- private final FileSystem fileSystem;
- private final Path hadoopPath;
-
- private JsonParser parser;
- private SchemaIdGenerator generator;
- private DiffSchema diffSchema;
- private RecordSchema currentSchema;
- private List<Field> removedFields;
- private OutputMutator outputMutator;
- private int batchSize;
- private final List<SchemaPath> columns;
-
- public JSONRecordReader(FragmentContext fragmentContext, String inputPath, FileSystem fileSystem, int batchSize,
- List<SchemaPath> columns) throws OutOfMemoryException {
- this.hadoopPath = new Path(inputPath);
- this.fileSystem = fileSystem;
- this.batchSize = batchSize;
- valueVectorMap = Maps.newHashMap();
- this.columns = columns;
- }
-
- public JSONRecordReader(FragmentContext fragmentContext, String inputPath, FileSystem fileSystem,
- List<SchemaPath> columns) throws OutOfMemoryException {
- this(fragmentContext, inputPath, fileSystem, DEFAULT_LENGTH, columns);
- }
-
- private JsonParser getParser() {
- return parser;
- }
-
- @Override
- public void setup(OutputMutator output) throws ExecutionSetupException {
- outputMutator = output;
- output.removeAllFields();
- currentSchema = new ObjectSchema();
- diffSchema = new DiffSchema();
- removedFields = Lists.newArrayList();
-
- try {
- JsonFactory factory = new JsonFactory();
- parser = factory.createJsonParser(fileSystem.open(hadoopPath));
- parser.nextToken(); // Read to the first START_OBJECT token
- generator = new SchemaIdGenerator();
- } catch (IOException e) {
- throw new ExecutionSetupException(e);
- }
- }
-
- @Override
- public int next() {
- if (parser.isClosed() || !parser.hasCurrentToken()) {
- return 0;
- }
-
- resetBatch();
-
- int nextRowIndex = 0;
-
- try {
- while (ReadType.OBJECT.readRecord(this, null, nextRowIndex++, 0)) {
- parser.nextToken(); // Read to START_OBJECT token
-
- if (!parser.hasCurrentToken()) {
- parser.close();
- break;
- }
- }
-
- parser.nextToken();
-
- if (!parser.hasCurrentToken()) {
- parser.close();
- }
-
- // Garbage collect fields never referenced in this batch
- for (Field field : Iterables.concat(currentSchema.removeUnreadFields(), removedFields)) {
- diffSchema.addRemovedField(field);
- outputMutator.removeField(field.getAsMaterializedField());
- }
-
- if (diffSchema.isChanged()) {
- outputMutator.setNewSchema();
- }
-
-
- } catch (IOException | SchemaChangeException e) {
- logger.error("Error reading next in Json reader", e);
- throw new DrillRuntimeException(e);
- }
-
- for (VectorHolder holder : valueVectorMap.values()) {
- if (holder.isRepeated()) {
- holder.setGroupCount(nextRowIndex);
- }
- holder.getValueVector().getMutator().setValueCount(nextRowIndex);
- }
-
- return nextRowIndex;
- }
-
- private void resetBatch() {
- for (VectorHolder value : valueVectorMap.values()) {
- value.reset();
- }
-
- currentSchema.resetMarkedFields();
- diffSchema.reset();
- removedFields.clear();
- }
-
- @Override
- public void cleanup() {
- try {
- parser.close();
- } catch (IOException e) {
- logger.warn("Error closing Json parser", e);
- }
- }
-
-
- private RecordSchema getCurrentSchema() {
- return currentSchema;
- }
-
- private void setCurrentSchema(RecordSchema schema) {
- currentSchema = schema;
- }
-
- private List<Field> getRemovedFields() {
- return removedFields;
- }
-
- private boolean fieldSelected(String field){
-
- SchemaPath sp = SchemaPath.getCompoundPath(field.split("\\."));
- if (this.columns != null && this.columns.size() > 0){
- for (SchemaPath expr : this.columns){
- if ( sp.equals(expr)){
- return true;
- }
- }
- return false;
- }
- return true;
- }
-
- public static enum ReadType {
- ARRAY(END_ARRAY) {
- @Override
- public Field createField(RecordSchema parentSchema, String prefixFieldName, String fieldName, MajorType fieldType, int index) {
- return new NamedField(parentSchema, prefixFieldName, fieldName, fieldType);
- }
-
- @Override
- public RecordSchema createSchema() throws IOException {
- return new ObjectSchema();
- }
- },
- OBJECT(END_OBJECT) {
- @Override
- public Field createField(RecordSchema parentSchema,
- String prefixFieldName,
- String fieldName,
- MajorType fieldType,
- int index) {
- return new NamedField(parentSchema, prefixFieldName, fieldName, fieldType);
- }
-
- @Override
- public RecordSchema createSchema() throws IOException {
- return new ObjectSchema();
- }
- };
-
- private final JsonToken endObject;
-
- ReadType(JsonToken endObject) {
- this.endObject = endObject;
- }
-
- public JsonToken getEndObject() {
- return endObject;
- }
-
- @SuppressWarnings("ConstantConditions")
- public boolean readRecord(JSONRecordReader reader,
- String prefixFieldName,
- int rowIndex,
- int groupCount) throws IOException, SchemaChangeException {
- JsonParser parser = reader.getParser();
- JsonToken token = parser.nextToken();
- JsonToken endObject = getEndObject();
- int colIndex = 0;
- boolean isFull = false;
- while (token != endObject) {
- if (token == FIELD_NAME) {
- token = parser.nextToken();
- continue;
- }
-
- String fieldName = parser.getCurrentName();
- if ( fieldName != null && ! reader.fieldSelected(fieldName)){
- // this field was not requested in the query
- token = parser.nextToken();
- colIndex += 1;
- continue;
- }
- MajorType fieldType = JacksonHelper.getFieldType(token, this == ReadType.ARRAY);
- ReadType readType = null;
- switch (token) {
- case START_ARRAY:
- readType = ReadType.ARRAY;
- groupCount++;
- break;
- case START_OBJECT:
- readType = ReadType.OBJECT;
- groupCount = 0;
- break;
- }
-
- if (fieldType != null) { // Including nulls
- boolean currentFieldFull = !recordData(
- readType,
- reader,
- fieldType,
- prefixFieldName,
- fieldName,
- rowIndex,
- colIndex,
- groupCount);
- if(readType == ReadType.ARRAY) {
- groupCount--;
- }
- isFull = isFull || currentFieldFull;
- }
- token = parser.nextToken();
- colIndex += 1;
- }
- return !isFull;
- }
-
- private void removeChildFields(List<Field> removedFields, Field field) {
- RecordSchema schema = field.getAssignedSchema();
- if (schema == null) {
- return;
- }
- for (Field childField : schema.getFields()) {
- removedFields.add(childField);
- if (childField.hasSchema()) {
- removeChildFields(removedFields, childField);
- }
- }
- }
-
- private boolean recordData(JSONRecordReader.ReadType readType,
- JSONRecordReader reader,
- MajorType fieldType,
- String prefixFieldName,
- String fieldName,
- int rowIndex,
- int colIndex,
- int groupCount) throws IOException, SchemaChangeException {
- RecordSchema currentSchema = reader.getCurrentSchema();
- Field field = currentSchema.getField(fieldName == null ? prefixFieldName : fieldName, colIndex);
- boolean isFieldFound = field != null;
- List<Field> removedFields = reader.getRemovedFields();
- boolean newFieldLateBound = fieldType.getMinorType().equals(MinorType.LATE);
-
- if (isFieldFound && !field.getFieldType().equals(fieldType)) {
- boolean existingFieldLateBound = field.getFieldType().getMinorType().equals(MinorType.LATE);
-
- if (newFieldLateBound && !existingFieldLateBound) {
- fieldType = Types.overrideMinorType(fieldType, field.getFieldType().getMinorType());
- } else if (!newFieldLateBound && existingFieldLateBound) {
- field.setFieldType(Types.overrideMinorType(field.getFieldType(), fieldType.getMinorType()));
- } else if (!newFieldLateBound && !existingFieldLateBound) {
- if (field.hasSchema()) {
- removeChildFields(removedFields, field);
- }
- removedFields.add(field);
- currentSchema.removeField(field, colIndex);
-
- isFieldFound = false;
- }
- }
-
- if (!isFieldFound) {
- field = createField(
- currentSchema,
- prefixFieldName,
- fieldName,
- fieldType,
- colIndex
- );
-
- reader.recordNewField(field);
- currentSchema.addField(field);
- }
-
- field.setRead(true);
-
- VectorHolder holder = getOrCreateVectorHolder(reader, field);
- if (readType != null) {
- RecordSchema fieldSchema = field.getAssignedSchema();
- RecordSchema newSchema = readType.createSchema();
-
- if (readType != ReadType.ARRAY) {
- reader.setCurrentSchema(fieldSchema);
- if (fieldSchema == null) reader.setCurrentSchema(newSchema);
- readType.readRecord(reader, field.getFullFieldName(), rowIndex, groupCount);
- } else {
- readType.readRecord(reader, field.getFullFieldName(), rowIndex, groupCount);
- }
-
- reader.setCurrentSchema(currentSchema);
-
- } else if (holder != null && !newFieldLateBound && fieldType.getMinorType() != MinorType.LATE) {
- return addValueToVector(
- rowIndex,
- holder,
- JacksonHelper.getValueFromFieldType(
- reader.getParser(),
- fieldType.getMinorType()
- ),
- fieldType.getMinorType(),
- groupCount
- );
- }
-
- return true;
- }
-
- private static <T> boolean addValueToVector(int index, VectorHolder holder, T val, MinorType minorType, int groupCount) {
- switch (minorType) {
- case BIGINT: {
- holder.incAndCheckLength(NullableIntHolder.WIDTH * 8 + 1);
- if (groupCount == 0) {
- if (val != null) {
- NullableBigIntVector int4 = (NullableBigIntVector) holder.getValueVector();
- NullableBigIntVector.Mutator m = int4.getMutator();
- m.set(index, (Long) val);
- }
- } else {
- if (val == null) {
- throw new UnsupportedOperationException("Nullable repeated int is not supported.");
- }
-
- RepeatedBigIntVector repeatedInt4 = (RepeatedBigIntVector) holder.getValueVector();
- RepeatedBigIntVector.Mutator m = repeatedInt4.getMutator();
- holder.setGroupCount(index);
- m.add(index, (Long) val);
- }
-
- return holder.hasEnoughSpace(NullableIntHolder.WIDTH * 8 + 1);
- }
- case FLOAT4: {
- holder.incAndCheckLength(NullableFloat4Holder.WIDTH * 8 + 1);
- if (groupCount == 0) {
- if (val != null) {
- NullableFloat4Vector float4 = (NullableFloat4Vector) holder.getValueVector();
- NullableFloat4Vector.Mutator m = float4.getMutator();
- m.set(index, (Float) val);
- }
- } else {
- if (val == null) {
- throw new UnsupportedOperationException("Nullable repeated float is not supported.");
- }
-
- RepeatedFloat4Vector repeatedFloat4 = (RepeatedFloat4Vector) holder.getValueVector();
- RepeatedFloat4Vector.Mutator m = repeatedFloat4.getMutator();
- holder.setGroupCount(index);
- m.add(index, (Float) val);
- }
- return holder.hasEnoughSpace(NullableFloat4Holder.WIDTH * 8 + 1);
- }
- case VARCHAR: {
- if (val == null) {
- return (index + 1) * 4 <= holder.getLength();
- } else {
- byte[] bytes = ((String) val).getBytes(UTF_8);
- int length = bytes.length;
- holder.incAndCheckLength(length);
- if (groupCount == 0) {
- NullableVarCharVector varLen4 = (NullableVarCharVector) holder.getValueVector();
- NullableVarCharVector.Mutator m = varLen4.getMutator();
- m.set(index, bytes);
- } else {
- RepeatedVarCharVector repeatedVarLen4 = (RepeatedVarCharVector) holder.getValueVector();
- RepeatedVarCharVector.Mutator m = repeatedVarLen4.getMutator();
- holder.setGroupCount(index);
- m.add(index, bytes);
- }
- return holder.hasEnoughSpace(length + 4 + 1);
- }
- }
- case BIT: {
- holder.incAndCheckLength(NullableBitHolder.WIDTH + 1);
- if (groupCount == 0) {
- if (val != null) {
- NullableBitVector bit = (NullableBitVector) holder.getValueVector();
- NullableBitVector.Mutator m = bit.getMutator();
- m.set(index, (Boolean) val ? 1 : 0);
- }
- } else {
- if (val == null) {
- throw new UnsupportedOperationException("Nullable repeated boolean is not supported.");
- }
-
- RepeatedBitVector repeatedBit = (RepeatedBitVector) holder.getValueVector();
- RepeatedBitVector.Mutator m = repeatedBit.getMutator();
- holder.setGroupCount(index);
- m.add(index, (Boolean) val ? 1 : 0);
- }
- return holder.hasEnoughSpace(NullableBitHolder.WIDTH + 1);
- }
- default:
- throw new DrillRuntimeException("Type not supported to add value. Type: " + minorType);
- }
- }
-
- private VectorHolder getOrCreateVectorHolder(JSONRecordReader reader, Field field) throws SchemaChangeException {
- return reader.getOrCreateVectorHolder(field);
- }
-
- public abstract RecordSchema createSchema() throws IOException;
-
- public abstract Field createField(RecordSchema parentSchema,
- String prefixFieldName,
- String fieldName,
- MajorType fieldType,
- int index);
- }
-
- private void recordNewField(Field field) {
- diffSchema.recordNewField(field);
- }
-
- private VectorHolder getOrCreateVectorHolder(Field field) throws SchemaChangeException {
- String fullFieldName = field.getFullFieldName();
- VectorHolder holder = valueVectorMap.get(fullFieldName);
-
- if (holder == null) {
- MajorType type = field.getFieldType();
- MinorType minorType = type.getMinorType();
-
- if (minorType.equals(MinorType.MAP) || minorType.equals(MinorType.LATE)) {
- return null;
- }
-
- MaterializedField f = MaterializedField.create(SchemaPath.getCompoundPath(fullFieldName.split("\\.")), type);
-
- ValueVector v = outputMutator.addField(f, TypeHelper.getValueVectorClass(minorType, type.getMode()));
- AllocationHelper.allocate(v, batchSize, 50);
- holder = new VectorHolder(v);
- valueVectorMap.put(fullFieldName, holder);
- return holder;
- }
- return holder;
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader2.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader2.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader2.java
new file mode 100644
index 0000000..bb52a20
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader2.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.easy.json;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.List;
+
+import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.memory.OutOfMemoryException;
+import org.apache.drill.exec.ops.FragmentContext;
+import org.apache.drill.exec.physical.impl.OutputMutator;
+import org.apache.drill.exec.store.RecordReader;
+import org.apache.drill.exec.vector.complex.fn.JsonReader;
+import org.apache.drill.exec.vector.complex.fn.JsonRecordSplitter;
+import org.apache.drill.exec.vector.complex.fn.UTF8JsonRecordSplitter;
+import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import com.google.hive12.common.collect.Lists;
+
+public class JSONRecordReader2 implements RecordReader{
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JSONRecordReader2.class);
+
+ private OutputMutator mutator;
+ private VectorContainerWriter writer;
+ private Path hadoopPath;
+ private FileSystem fileSystem;
+ private InputStream stream;
+ private JsonReader jsonReader;
+
+ public JSONRecordReader2(FragmentContext fragmentContext, String inputPath, FileSystem fileSystem,
+ List<SchemaPath> columns) throws OutOfMemoryException {
+ this.hadoopPath = new Path(inputPath);
+ this.fileSystem = fileSystem;
+ }
+
+ @Override
+ public void setup(OutputMutator output) throws ExecutionSetupException {
+ try{
+ stream = fileSystem.open(hadoopPath);
+ JsonRecordSplitter splitter = new UTF8JsonRecordSplitter(stream);
+ this.writer = new VectorContainerWriter(output);
+ this.mutator = output;
+ jsonReader = new JsonReader(splitter);
+ }catch(IOException e){
+ throw new ExecutionSetupException("Failure reading JSON file.", e);
+ }
+ }
+
+ @Override
+ public int next() {
+ writer.allocate();
+ writer.reset();
+
+ int i =0;
+
+ try{
+ outside: while(true){
+ writer.setPosition(i);
+
+ switch(jsonReader.write(writer)){
+ case WRITE_SUCCEED:
+ i++;
+ break;
+
+ case NO_MORE:
+// System.out.println("no more records - main loop");
+ break outside;
+
+ case WRITE_FAILED:
+// System.out.println("==== hit bounds at " + i);
+ break outside;
+ };
+ }
+
+
+ writer.setValueCount(i);
+ mutator.setNewSchema();
+ return i;
+
+ }catch(IOException | SchemaChangeException e){
+ throw new DrillRuntimeException("Failure while reading JSON file.", e);
+ }
+
+ }
+
+ @Override
+ public void cleanup() {
+ try {
+ stream.close();
+ } catch (IOException e) {
+ logger.warn("Failure while closing stream.", e);
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
index 25931db..9e01268 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
@@ -224,7 +224,7 @@ public class HiveRecordReader implements RecordReader {
PrimitiveCategory pCat = primitiveCategories.get(i);
MajorType type = getMajorType(pCat);
MaterializedField field = MaterializedField.create(SchemaPath.getSimplePath(columnNames.get(i)), type);
- ValueVector vv = output.addField(field, TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()));
+ ValueVector vv = output.addField(field, (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()));
vectors.add(vv);
}
for (int i = 0; i < selectedPartitionNames.size(); i++) {
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
index eb9e7a6..5c07dc5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java
@@ -76,7 +76,7 @@ public class MockRecordReader implements RecordReader {
for (int i = 0; i < config.getTypes().length; i++) {
MajorType type = config.getTypes()[i].getMajorType();
- valueVectors[i] = output.addField(getVector(config.getTypes()[i].getName(), type, batchRecordCount), TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()));
+ valueVectors[i] = output.addField(getVector(config.getTypes()[i].getName(), type, batchRecordCount), (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()));
}
output.setNewSchema();
} catch (SchemaChangeException e) {
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRecordReader.java
index 75cd799..5d28456 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetRecordReader.java
@@ -25,6 +25,7 @@ import java.util.HashMap;
import java.util.List;
import com.google.common.base.Preconditions;
+
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.expression.ExpressionPosition;
@@ -41,10 +42,6 @@ import org.apache.drill.exec.physical.impl.OutputMutator;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.store.RecordReader;
import org.apache.drill.exec.vector.*;
-import org.apache.drill.exec.vector.NullableVarBinaryVector;
-import org.apache.drill.exec.vector.NullableVarCharVector;
-import org.apache.drill.exec.vector.VarBinaryVector;
-import org.apache.drill.exec.vector.VarCharVector;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -263,7 +260,7 @@ public class ParquetRecordReader implements RecordReader {
//convertedTypes.put()
fieldFixedLength = column.getType() != PrimitiveType.PrimitiveTypeName.BINARY;
- v = output.addField(field, TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()));
+ v = output.addField(field, (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()));
if (column.getType() != PrimitiveType.PrimitiveTypeName.BINARY) {
createFixedColumnReader(fieldFixedLength, column, columnChunkMetaData, recordsPerBatch, v,
convertedType);
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/AllocationHelper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/AllocationHelper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/AllocationHelper.java
index 86aec44..f7a74c2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/AllocationHelper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/AllocationHelper.java
@@ -19,11 +19,11 @@ package org.apache.drill.exec.vector;
public class AllocationHelper {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AllocationHelper.class);
-
+
public static void allocate(ValueVector v, int valueCount, int bytesPerValue){
allocate(v, valueCount, bytesPerValue, 5);
}
-
+
public static void allocate(ValueVector v, int valueCount, int bytesPerValue, int repeatedPerTop){
if(v instanceof FixedWidthVector){
((FixedWidthVector) v).allocateNew(valueCount);
@@ -34,7 +34,7 @@ public class AllocationHelper {
}else if(v instanceof RepeatedVariableWidthVector){
((RepeatedVariableWidthVector) v).allocateNew(valueCount * bytesPerValue * repeatedPerTop, valueCount, valueCount * repeatedPerTop);
}else{
- throw new UnsupportedOperationException();
+ v.allocateNew();
}
}
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java
index ddddab1..9641e6a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseDataValueVector.java
@@ -17,21 +17,25 @@
*/
package org.apache.drill.exec.vector;
+import java.util.Iterator;
+
import io.netty.buffer.ByteBuf;
import org.apache.drill.exec.memory.BufferAllocator;
-import org.apache.drill.exec.proto.UserBitShared.FieldMetadata;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.record.DeadBuf;
import org.apache.drill.exec.record.MaterializedField;
+import com.google.hive12.common.collect.Iterators;
+
public abstract class BaseDataValueVector extends BaseValueVector{
protected ByteBuf data = DeadBuf.DEAD_BUFFER;
protected int valueCount;
-
+
public BaseDataValueVector(MaterializedField field, BufferAllocator allocator) {
super(field, allocator);
-
+
}
/**
@@ -46,7 +50,7 @@ public abstract class BaseDataValueVector extends BaseValueVector{
}
}
-
+
@Override
public ByteBuf[] getBuffers(){
ByteBuf[] out;
@@ -60,18 +64,24 @@ public abstract class BaseDataValueVector extends BaseValueVector{
clear();
return out;
}
-
+
public int getBufferSize() {
if(valueCount == 0) return 0;
return data.writerIndex();
}
@Override
- public abstract FieldMetadata getMetadata();
+ public abstract SerializedField getMetadata();
public ByteBuf getData(){
return data;
}
-
-
+
+ @Override
+ public Iterator<ValueVector> iterator() {
+ return Iterators.emptyIterator();
+ }
+
+
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseValueVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseValueVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseValueVector.java
index 7cc1adf..7a61475 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseValueVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BaseValueVector.java
@@ -17,14 +17,21 @@
*/
package org.apache.drill.exec.vector;
+import java.util.Iterator;
+
import org.apache.drill.common.expression.FieldReference;
+
import io.netty.buffer.ByteBuf;
+
import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.record.MaterializedField;
+import com.google.hive12.common.collect.Iterators;
+
public abstract class BaseValueVector implements ValueVector{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BaseValueVector.class);
-
+
protected final BufferAllocator allocator;
protected final MaterializedField field;
@@ -32,21 +39,24 @@ public abstract class BaseValueVector implements ValueVector{
this.allocator = allocator;
this.field = field;
}
-
+
@Override
public void close() {
clear();
}
-
+
@Override
public MaterializedField getField() {
return field;
}
-
+
public MaterializedField getField(FieldReference ref){
return getField().clone(ref);
}
-
+
+ protected SerializedField.Builder getMetadataBuilder(){
+ return getField().getAsBuilder();
+ }
abstract public ByteBuf getData();
@@ -54,12 +64,15 @@ public abstract class BaseValueVector implements ValueVector{
public abstract int getValueCount();
public void reset(){}
}
-
+
abstract class BaseMutator implements Mutator{
public void reset(){}
}
-
-
-
+
+ @Override
+ public Iterator<ValueVector> iterator() {
+ return Iterators.emptyIterator();
+ }
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BitVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BitVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BitVector.java
index 63384a3..597b0f1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BitVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/BitVector.java
@@ -24,9 +24,12 @@ import org.apache.drill.exec.expr.holders.BitHolder;
import org.apache.drill.exec.expr.holders.NullableBitHolder;
import org.apache.drill.exec.memory.AccountingByteBuf;
import org.apache.drill.exec.memory.BufferAllocator;
-import org.apache.drill.exec.proto.UserBitShared.FieldMetadata;
+import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.TransferPair;
+import org.apache.drill.exec.vector.complex.impl.BitReaderImpl;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
/**
* Bit implements a vector of bit-width values. Elements in the vector are accessed by position from the logical start
@@ -49,11 +52,10 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
}
@Override
- public FieldMetadata getMetadata() {
- return FieldMetadata.newBuilder()
- .setDef(getField().getDef())
- .setValueCount(valueCount)
- .setBufferLength( (int) Math.ceil(valueCount / 8.0))
+ public SerializedField getMetadata() {
+ return field.getAsBuilder() //
+ .setValueCount(valueCount) //
+ .setBufferLength( (int) Math.ceil(valueCount / 8.0)) //
.build();
}
@@ -66,13 +68,26 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
}
public void allocateNew() {
+ if(!allocateNewSafe()) throw new OutOfMemoryRuntimeException();
+ }
+
+ public boolean allocateNewSafe() {
clear();
if (allocationMonitor > 5) {
allocationValueCount = Math.min(1, (int)(allocationValueCount * 0.9));
} else if (allocationMonitor < -5) {
allocationValueCount = (int) (allocationValueCount * 1.1);
}
- allocateNew(allocationValueCount);
+
+ clear();
+ valueCapacity = allocationValueCount;
+ int valueSize = getSizeFromCount(allocationValueCount);
+ data = allocator.buffer(valueSize);
+ if(data == null) return false;
+ for (int i = 0; i < valueSize; i++) {
+ data.setByte(i, 0);
+ }
+ return true;
}
/**
@@ -112,8 +127,8 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
}
@Override
- public void load(FieldMetadata metadata, ByteBuf buffer) {
- assert this.field.getDef().equals(metadata.getDef());
+ public void load(SerializedField metadata, ByteBuf buffer) {
+ assert this.field.matches(metadata);
int loaded = load(metadata.getValueCount(), buffer);
assert metadata.getBufferLength() == loaded;
}
@@ -177,9 +192,6 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
}
}
- private void copyTo(int startIndex, int length, BitVector target) {
-
- }
private class TransferImpl implements TransferPair {
BitVector to;
@@ -205,8 +217,8 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
}
@Override
- public void copyValue(int fromIndex, int toIndex) {
- to.copyFrom(fromIndex, toIndex, BitVector.this);
+ public boolean copyValueSafe(int fromIndex, int toIndex) {
+ return to.copyFromSafe(fromIndex, toIndex, BitVector.this);
}
}
@@ -233,7 +245,7 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
}
@Override
- public final Object getObject(int index) {
+ public final Boolean getObject(int index) {
return new Boolean(get(index) != 0);
}
@@ -245,9 +257,15 @@ public final class BitVector extends BaseDataValueVector implements FixedWidthVe
holder.value = get(index);
}
- final void get(int index, NullableBitHolder holder) {
+ public final void get(int index, NullableBitHolder holder) {
+ holder.isSet = 1;
holder.value = get(index);
}
+
+ @Override
+ public FieldReader getReader() {
+ return new BitReaderImpl(BitVector.this);
+ }
}
/**
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/RepeatedMutator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/RepeatedMutator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/RepeatedMutator.java
index 8e097e4..ad2ba1b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/RepeatedMutator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/RepeatedMutator.java
@@ -18,5 +18,5 @@
package org.apache.drill.exec.vector;
public interface RepeatedMutator extends ValueVector.Mutator {
- public void startNewGroup(int index);
+ public boolean startNewGroup(int index);
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/ValueVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/ValueVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/ValueVector.java
index 258b354..8b871fc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/ValueVector.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/ValueVector.java
@@ -22,24 +22,34 @@ import io.netty.buffer.ByteBuf;
import java.io.Closeable;
import org.apache.drill.common.expression.FieldReference;
-import org.apache.drill.exec.proto.UserBitShared.FieldMetadata;
+import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.TransferPair;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
/**
* ValueVectorTypes defines a set of template-generated classes which implement type-specific value vectors. The
* template approach was chosen due to the lack of multiple inheritence. It is also important that all related logic be
* as efficient as possible.
*/
-public interface ValueVector extends Closeable {
+public interface ValueVector extends Closeable, Iterable<ValueVector> {
+
/**
* Allocate new buffers. ValueVector implements logic to determine how much to allocate.
+ * @throws OutOfMemoryRuntimeException Thrown if no memory can be allocated.
*/
- public void allocateNew();
+ public void allocateNew() throws OutOfMemoryRuntimeException;
+
+ /**
+ * Allocates new buffers. ValueVector implements logic to determine how much to allocate.
+ * @return Returns true if allocation was succesful.
+ */
+ public boolean allocateNewSafe();
public int getBufferSize();
-
+
/**
* Alternative to clear(). Allows use as closeable in try-with-resources.
*/
@@ -52,27 +62,26 @@ public interface ValueVector extends Closeable {
/**
* Get information about how this field is materialized.
- *
+ *
* @return
*/
public MaterializedField getField();
/**
- * Get a transfer pair to allow transferring this vectors data between this vector and a destination vector of the same
- * type. Will also generate a second instance of this vector class that is connected through the TransferPair.
- *
- * @return
+ * Get a transfer pair to allow transferring this vectors data between this vector and a destination vector of the
+ * same type. Will also generate a second instance of this vector class that is connected through the TransferPair.
+ *
+ * @return
*/
public TransferPair getTransferPair();
public TransferPair makeTransferPair(ValueVector to);
-
-
+
public TransferPair getTransferPair(FieldReference ref);
/**
* Given the current buffer allocation, return the maximum number of values that this buffer can contain.
- *
+ *
* @return Maximum values buffer can contain. In the case of a Repeated field, this is the number of atoms, not
* repeated groups.
*/
@@ -80,37 +89,40 @@ public interface ValueVector extends Closeable {
/**
* Get Accessor to read value vector data.
- *
+ *
* @return
*/
public abstract Accessor getAccessor();
/**
- * Return the underlying buffers associated with this vector. Note that this doesn't impact the
- * reference counts for this buffer so it only should be used for in-context access. Also note
- * that this buffer changes regularly thus external classes shouldn't hold a reference to
- * it (unless they change it).
+ * Return the underlying buffers associated with this vector. Note that this doesn't impact the reference counts for
+ * this buffer so it only should be used for in-context access. Also note that this buffer changes regularly thus
+ * external classes shouldn't hold a reference to it (unless they change it).
*
* @return The underlying ByteBuf.
*/
public abstract ByteBuf[] getBuffers();
-
+
/**
- * Load the data provided in the buffer. Typically used when deserializing from the wire.
- * @param metadata Metadata used to decode the incoming buffer.
- * @param buffer The buffer that contains the ValueVector.
+ * Load the data provided in the buffer. Typically used when deserializing from the wire.
+ *
+ * @param metadata
+ * Metadata used to decode the incoming buffer.
+ * @param buffer
+ * The buffer that contains the ValueVector.
*/
- public void load(FieldMetadata metadata, ByteBuf buffer);
-
+ public void load(SerializedField metadata, ByteBuf buffer);
+
/**
- * Get the metadata for this field. Used in serialization
+ * Get the metadata for this field. Used in serialization
+ *
* @return FieldMetadata for this field.
*/
- public FieldMetadata getMetadata();
-
+ public SerializedField getMetadata();
+
/**
* Get a Mutator to update this vectors data.
- *
+ *
* @return
*/
public abstract Mutator getMutator();
@@ -125,23 +137,25 @@ public interface ValueVector extends Closeable {
/**
* Get the Java Object representation of the element at the specified position. Useful for testing.
- *
+ *
* @param index
* Index of the value to get
*/
public abstract Object getObject(int index);
public int getValueCount();
-
+
public boolean isNull(int index);
public void reset();
+
+ public FieldReader getReader();
}
public interface Mutator {
/**
* Set the top number values (optional/required) or number of value groupings (repeated) in this vector.
- *
+ *
* @param valueCount
*/
public void setValueCount(int valueCount);
@@ -150,4 +164,5 @@ public interface ValueVector extends Closeable {
public void generateTestData(int values);
}
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/GenericAccessor.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/GenericAccessor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/GenericAccessor.java
index 32f08b0..adee171 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/GenericAccessor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/accessor/GenericAccessor.java
@@ -40,6 +40,6 @@ public class GenericAccessor extends AbstractSqlAccessor {
@Override
TypeProtos.MajorType getType() {
- return v.getMetadata().getDef().getMajorType();
+ return v.getMetadata().getMajorType();
}
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractContainerVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractContainerVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractContainerVector.java
new file mode 100644
index 0000000..ab1d270
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractContainerVector.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex;
+
+import org.apache.drill.common.expression.PathSegment;
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.exec.record.TypedFieldId;
+import org.apache.drill.exec.vector.ValueVector;
+
+public abstract class AbstractContainerVector implements ValueVector{
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AbstractContainerVector.class);
+
+ public abstract <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz);
+ public abstract <T extends ValueVector> T get(String name, Class<T> clazz);
+ public abstract int size();
+
+ protected <T extends ValueVector> T typeify(ValueVector v, Class<T> clazz){
+ if(clazz.isAssignableFrom(v.getClass())){
+ return (T) v;
+ }else{
+ throw new IllegalStateException(String.format("Vector requested [%s] was different than type stored [%s]. Drill doesn't yet support heterogeneous types.", clazz.getSimpleName(), v.getClass().getSimpleName()));
+ }
+ }
+
+ public abstract VectorWithOrdinal getVectorWithOrdinal(String name);
+
+
+ public TypedFieldId getFieldIdIfMatches(TypedFieldId.Builder builder, boolean addToBreadCrumb, PathSegment seg){
+ if(seg == null){
+ if(addToBreadCrumb) builder.intermediateType(this.getField().getType());
+ return builder.finalType(this.getField().getType()).build();
+ }
+
+ if(seg.isArray()){
+
+ if(seg.isLastPath()){
+ if(addToBreadCrumb) builder.intermediateType(this.getField().getType());
+ return builder //
+ .remainder(seg) //
+ .finalType(this.getField().getType()) //
+ .withIndex() //
+ .build();
+ }else{
+ if(addToBreadCrumb){
+ addToBreadCrumb = false;
+ builder.remainder(seg);
+ }
+ // this is a complex array reference, which means it doesn't correspond directly to a vector by itself.
+ seg = seg.getChild();
+
+ }
+
+ }else{
+ // name segment.
+ }
+
+ VectorWithOrdinal vord = getVectorWithOrdinal(seg.isArray() ? null : seg.getNameSegment().getPath());
+ if(vord == null) return null;
+
+
+ if(addToBreadCrumb){
+ builder.intermediateType(this.getField().getType());
+ builder.addId(vord.ordinal);
+ }
+
+ ValueVector v = vord.vector;
+
+ if(v instanceof AbstractContainerVector){
+ // we're looking for a multi path.
+ AbstractContainerVector c = (AbstractContainerVector) v;
+ return c.getFieldIdIfMatches(builder, addToBreadCrumb, seg.getChild());
+ }else{
+ if(seg.isLastPath()){
+ if(addToBreadCrumb) builder.intermediateType(v.getField().getType());
+ return builder.finalType(v.getField().getType()).build();
+ }else{
+ logger.warn("You tried to request a complex type inside a scalar object.");
+ return null;
+ }
+ }
+
+ }
+
+ protected boolean supportsDirectRead(){
+ return false;
+ }
+
+ protected class VectorWithOrdinal{
+ final ValueVector vector;
+ final int ordinal;
+
+ public VectorWithOrdinal(ValueVector v, int ordinal){
+ this.vector = v;
+ this.ordinal = ordinal;
+ }
+ }
+}
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractMapVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractMapVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractMapVector.java
new file mode 100644
index 0000000..f126e5c
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/AbstractMapVector.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex;
+
+public class AbstractMapVector {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AbstractMapVector.class);
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java
new file mode 100644
index 0000000..91c0be5
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java
@@ -0,0 +1,391 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex;
+
+import io.netty.buffer.ByteBuf;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.drill.common.expression.FieldReference;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos.DataMode;
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.expr.TypeHelper;
+import org.apache.drill.exec.expr.holders.ComplexHolder;
+import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
+import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.record.TransferPair;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.complex.RepeatedMapVector.MapSingleCopier;
+import org.apache.drill.exec.vector.complex.impl.SingleMapReaderImpl;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+
+import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
+import com.google.hive12.common.collect.Lists;
+
+public class MapVector extends AbstractContainerVector {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(MapVector.class);
+
+ public final static MajorType TYPE = MajorType.newBuilder().setMinorType(MinorType.MAP).setMode(DataMode.REQUIRED).build();
+
+ final HashMap<String, ValueVector> vectors = Maps.newHashMap();
+ private final Map<String, VectorWithOrdinal> vectorIds = Maps.newHashMap();
+ private final IntObjectOpenHashMap<ValueVector> vectorsById = new IntObjectOpenHashMap<>();
+ private final SingleMapReaderImpl reader = new SingleMapReaderImpl(MapVector.this);
+ private final Accessor accessor = new Accessor();
+ private final Mutator mutator = new Mutator();
+ private final BufferAllocator allocator;
+ private MaterializedField field;
+ private int valueCount;
+
+ public MapVector(String path, BufferAllocator allocator){
+ this.field = MaterializedField.create(SchemaPath.getSimplePath(path), TYPE);
+ this.allocator = allocator;
+ }
+ public MapVector(MaterializedField field, BufferAllocator allocator){
+ this.field = field;
+ this.allocator = allocator;
+ }
+
+ public int size(){
+ return vectors.size();
+ }
+
+ transient private MapTransferPair ephPair;
+ transient private MapSingleCopier ephPair2;
+
+ public boolean copyFromSafe(int fromIndex, int thisIndex, MapVector from){
+ if(ephPair == null || ephPair.from != from){
+ ephPair = (MapTransferPair) from.makeTransferPair(this);
+ }
+ return ephPair.copyValueSafe(fromIndex, thisIndex);
+ }
+
+ public boolean copyFromSafe(int fromSubIndex, int thisIndex, RepeatedMapVector from){
+ if(ephPair2 == null || ephPair2.from != from){
+ ephPair2 = from.makeSingularCopier(this);
+ }
+ return ephPair2.copySafe(fromSubIndex, thisIndex);
+ }
+
+ @Override
+ public <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz) {
+ ValueVector v = vectors.get(name);
+
+ if(v == null){
+ v = TypeHelper.getNewVector(field.getPath(), name, allocator, type);
+ Preconditions.checkNotNull(v, String.format("Failure to create vector of type %s.", type));
+ put(name, v);
+ }
+ return typeify(v, clazz);
+
+ }
+
+ protected void put(String name, ValueVector vv){
+ int ordinal = vectors.size();
+ if(vectors.put(name, vv) != null){
+ throw new IllegalStateException();
+ }
+ vectorIds.put(name, new VectorWithOrdinal(vv, ordinal));
+ vectorsById.put(ordinal, vv);
+ field.addChild(vv.getField());
+ }
+
+
+ @Override
+ protected boolean supportsDirectRead() {
+ return true;
+ }
+
+ public Iterator<String> fieldNameIterator(){
+ return vectors.keySet().iterator();
+ }
+
+ @Override
+ public void allocateNew() throws OutOfMemoryRuntimeException {
+ if(!allocateNewSafe()) throw new OutOfMemoryRuntimeException();
+ }
+
+ @Override
+ public boolean allocateNewSafe() {
+ for(ValueVector v : vectors.values()){
+ if(!v.allocateNewSafe()) return false;
+ }
+ return true;
+ }
+
+ @Override
+ public <T extends ValueVector> T get(String name, Class<T> clazz) {
+ ValueVector v = vectors.get(name);
+ if(v == null) throw new IllegalStateException(String.format("Attempting to access invalid map field of name %s.", name));
+ return typeify(v, clazz);
+ }
+
+ @Override
+ public int getBufferSize() {
+ if(valueCount == 0 || vectors.isEmpty()) return 0;
+ long buffer = 0;
+ for(ValueVector v : this){
+ buffer += v.getBufferSize();
+ }
+
+ return (int) buffer;
+ }
+
+ @Override
+ public void close() {
+ for(ValueVector v : this){
+ v.close();
+ }
+ }
+
+ @Override
+ public Iterator<ValueVector> iterator() {
+ return vectors.values().iterator();
+ }
+
+ @Override
+ public MaterializedField getField() {
+ return field;
+ }
+
+ @Override
+ public TransferPair getTransferPair() {
+ return new MapTransferPair(field.getPath());
+ }
+
+ @Override
+ public TransferPair makeTransferPair(ValueVector to) {
+ return new MapTransferPair( (MapVector) to);
+ }
+
+ @Override
+ public TransferPair getTransferPair(FieldReference ref) {
+ return new MapTransferPair(ref);
+ }
+
+ private class MapTransferPair implements TransferPair{
+ private MapVector from = MapVector.this;
+ private TransferPair[] pairs;
+ private MapVector to;
+
+ public MapTransferPair(SchemaPath path){
+ MapVector v = new MapVector(MaterializedField.create(path, TYPE), allocator);
+ pairs = new TransferPair[vectors.size()];
+ int i =0;
+ for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ TransferPair otherSide = e.getValue().getTransferPair();
+ v.put(e.getKey(), otherSide.getTo());
+ pairs[i++] = otherSide;
+ }
+ this.to = v;
+ }
+
+ public MapTransferPair(MapVector to){
+ this.to = to;
+ pairs = new TransferPair[vectors.size()];
+ int i =0;
+ for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ int preSize = to.vectors.size();
+ ValueVector v = to.addOrGet(e.getKey(), e.getValue().getField().getType(), e.getValue().getClass());
+ if(to.vectors.size() != preSize) v.allocateNew();
+ pairs[i++] = e.getValue().makeTransferPair(v);
+ }
+ }
+
+
+ @Override
+ public void transfer() {
+ for(TransferPair p : pairs){
+ p.transfer();
+ }
+ to.valueCount = valueCount;
+ clear();
+ }
+
+ @Override
+ public ValueVector getTo() {
+ return to;
+ }
+
+ @Override
+ public boolean copyValueSafe(int from, int to) {
+ for(TransferPair p : pairs){
+ if(!p.copyValueSafe(from, to)) return false;
+ }
+ return true;
+ }
+
+ @Override
+ public void splitAndTransfer(int startIndex, int length) {
+ throw new UnsupportedOperationException();
+ }
+
+ }
+
+ @Override
+ public int getValueCapacity() {
+ if(this.vectors.isEmpty()) return 0;
+ return vectors.values().iterator().next().getValueCapacity();
+ }
+
+ @Override
+ public Accessor getAccessor() {
+ return accessor;
+ }
+
+ @Override
+ public ByteBuf[] getBuffers() {
+ List<ByteBuf> bufs = Lists.newArrayList();
+ for(ValueVector v : vectors.values()){
+ for(ByteBuf b : v.getBuffers()){
+ bufs.add(b);
+ }
+ }
+ return bufs.toArray(new ByteBuf[bufs.size()]);
+ }
+
+ @Override
+ public void load(SerializedField metadata, ByteBuf buf) {
+ List<SerializedField> fields = metadata.getChildList();
+
+ int bufOffset = 0;
+ for (SerializedField fmd : fields) {
+ MaterializedField fieldDef = MaterializedField.create(fmd);
+
+ ValueVector v = vectors.get(fieldDef.getLastName());
+ if(v == null) {
+ // if we arrive here, we didn't have a matching vector.
+
+ v = TypeHelper.getNewVector(fieldDef, allocator);
+ }
+ if (fmd.getValueCount() == 0){
+ v.clear();
+ } else {
+ v.load(fmd, buf.slice(bufOffset, fmd.getBufferLength()));
+ }
+ bufOffset += fmd.getBufferLength();
+ put(fieldDef.getLastName(), v);
+ }
+ }
+
+ @Override
+ public SerializedField getMetadata() {
+ SerializedField.Builder b = getField() //
+ .getAsBuilder() //
+ .setBufferLength(getBufferSize()) //
+ .setValueCount(valueCount);
+
+
+ for(ValueVector v : vectors.values()){
+ b.addChild(v.getMetadata());
+ }
+ return b.build();
+ }
+
+ @Override
+ public Mutator getMutator() {
+ return mutator;
+ }
+
+ public class Accessor implements ValueVector.Accessor{
+
+ @Override
+ public Object getObject(int index) {
+ Map<String, Object> vv = Maps.newHashMap();
+ for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ ValueVector v = e.getValue();
+ String k = e.getKey();
+ Object value = v.getAccessor().getObject(index);
+ if(value != null){
+ vv.put(k, value);
+ }
+ }
+ return vv;
+ }
+
+ public void get(int index, ComplexHolder holder){
+ reader.setPosition(index);
+ holder.reader = reader;
+ }
+
+ @Override
+ public int getValueCount() {
+ return valueCount;
+ }
+
+ @Override
+ public boolean isNull(int index) {
+ return false;
+ }
+
+ @Override
+ public void reset() {
+ }
+
+ @Override
+ public FieldReader getReader() {
+ return new SingleMapReaderImpl(MapVector.this);
+ }
+
+ }
+
+ public ValueVector getVectorById(int id){
+ return vectorsById.get(id);
+ }
+
+ public class Mutator implements ValueVector.Mutator{
+
+ @Override
+ public void setValueCount(int valueCount) {
+ for(ValueVector v : vectors.values()){
+ v.getMutator().setValueCount(valueCount);
+ }
+ MapVector.this.valueCount = valueCount;
+ }
+
+ @Override
+ public void reset() {
+ }
+
+ @Override
+ public void generateTestData(int values) {
+ }
+
+ }
+
+ @Override
+ public void clear() {
+ for(ValueVector v : vectors.values()){
+ v.clear();
+ }
+ }
+
+ public VectorWithOrdinal getVectorWithOrdinal(String name){
+ return vectorIds.get(name);
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/Positionable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/Positionable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/Positionable.java
new file mode 100644
index 0000000..6d86a64
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/Positionable.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex;
+
+public interface Positionable {
+ public void setPosition(int index);
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedListVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedListVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedListVector.java
new file mode 100644
index 0000000..93930b5
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedListVector.java
@@ -0,0 +1,407 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex;
+
+import io.netty.buffer.ByteBuf;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.drill.common.expression.FieldReference;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.expr.TypeHelper;
+import org.apache.drill.exec.expr.holders.ComplexHolder;
+import org.apache.drill.exec.expr.holders.RepeatedListHolder;
+import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
+import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.record.TransferPair;
+import org.apache.drill.exec.vector.RepeatedFixedWidthVector;
+import org.apache.drill.exec.vector.UInt4Vector;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.complex.impl.NullReader;
+import org.apache.drill.exec.vector.complex.impl.RepeatedListReaderImpl;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+
+import com.google.common.collect.Lists;
+import com.google.hive12.common.base.Preconditions;
+
+
+public class RepeatedListVector extends AbstractContainerVector implements RepeatedFixedWidthVector{
+
+ private final UInt4Vector offsets; // offsets to start of each record
+ private final BufferAllocator allocator;
+ private final Mutator mutator = new Mutator();
+ private final Accessor accessor = new Accessor();
+ private ValueVector vector;
+ private final MaterializedField field;
+ private final RepeatedListReaderImpl reader = new RepeatedListReaderImpl(null, this);
+ private int allocationValueCount = 4000;
+ private int allocationMonitor = 0;
+
+ private int lastSet = 0;
+
+ private int valueCount;
+
+ public static MajorType TYPE = Types.repeated(MinorType.LIST);
+
+ public RepeatedListVector(MaterializedField field, BufferAllocator allocator){
+ this.allocator = allocator;
+ this.offsets = new UInt4Vector(null, allocator);
+ this.field = field;
+ }
+
+ public int size(){
+ return vector != null ? 1 : 0;
+ }
+
+ public RepeatedListVector(SchemaPath path, BufferAllocator allocator){
+ this(MaterializedField.create(path, TYPE), allocator);
+ }
+
+ transient private RepeatedListTransferPair ephPair;
+
+ public boolean copyFromSafe(int fromIndex, int thisIndex, RepeatedListVector from){
+ if(ephPair == null || ephPair.from != from){
+ ephPair = (RepeatedListTransferPair) from.makeTransferPair(this);
+ }
+ return ephPair.copyValueSafe(fromIndex, thisIndex);
+ }
+
+ public Mutator getMutator(){
+ return mutator;
+ }
+
+ @Override
+ public void allocateNew() throws OutOfMemoryRuntimeException {
+ if(!allocateNewSafe()) throw new OutOfMemoryRuntimeException();
+ }
+
+ @Override
+ public boolean allocateNewSafe() {
+ if(!offsets.allocateNewSafe()) return false;
+
+ if(vector != null){
+ return vector.allocateNewSafe();
+ }else{
+ return true;
+ }
+
+ }
+
+ public class Mutator implements ValueVector.Mutator{
+
+ public void startNewGroup(int index) {
+ offsets.getMutator().set(index+1, offsets.getAccessor().get(index));
+ }
+
+ public int add(int index){
+ int endOffset = index+1;
+ int currentChildOffset = offsets.getAccessor().get(endOffset);
+ int newChildOffset = currentChildOffset + 1;
+ boolean success = offsets.getMutator().setSafe(endOffset, newChildOffset);
+ lastSet = index;
+ if(!success) return -1;
+
+ // this is done at beginning so return the currentChildOffset, not the new offset.
+ return currentChildOffset;
+
+ }
+
+ @Override
+ public void setValueCount(int groupCount) {
+ populateEmpties(groupCount);
+ offsets.getMutator().setValueCount(groupCount+1);
+
+ if(vector != null){
+ int valueCount = offsets.getAccessor().get(groupCount);
+ vector.getMutator().setValueCount(valueCount);
+ }
+ }
+
+ @Override
+ public void reset() {
+ lastSet = 0;
+ }
+
+ @Override
+ public void generateTestData(int values) {
+ }
+
+ }
+
+ public class Accessor implements ValueVector.Accessor {
+
+ @Override
+ public Object getObject(int index) {
+ List<Object> l = Lists.newArrayList();
+ int end = offsets.getAccessor().get(index+1);
+ for(int i = offsets.getAccessor().get(index); i < end; i++){
+ l.add(vector.getAccessor().getObject(i));
+ }
+ return l;
+ }
+
+ @Override
+ public int getValueCount() {
+ return offsets.getAccessor().getValueCount() - 1;
+ }
+
+ public void get(int index, RepeatedListHolder holder){
+ assert index <= getValueCapacity();
+ holder.start = offsets.getAccessor().get(index);
+ holder.end = offsets.getAccessor().get(index+1);
+ }
+
+ public void get(int index, ComplexHolder holder){
+ FieldReader reader = getReader();
+ reader.setPosition(index);
+ holder.reader = reader;
+ }
+
+ public void get(int index, int arrayIndex, ComplexHolder holder){
+ RepeatedListHolder h = new RepeatedListHolder();
+ get(index, h);
+ int offset = h.start + arrayIndex;
+
+ if(offset >= h.end){
+ holder.reader = NullReader.INSTANCE;
+ }else{
+ FieldReader r = vector.getAccessor().getReader();
+ r.setPosition(offset);
+ holder.reader = r;
+ }
+
+ }
+
+ @Override
+ public boolean isNull(int index) {
+ return false;
+ }
+
+ @Override
+ public void reset() {
+ }
+
+ @Override
+ public FieldReader getReader() {
+ return reader;
+ }
+
+ }
+
+ @Override
+ public int getBufferSize() {
+ return offsets.getBufferSize() + vector.getBufferSize();
+ }
+
+ @Override
+ public void close() {
+ offsets.close();
+ if(vector != null) vector.close();
+ }
+
+ @Override
+ public void clear() {
+ lastSet = 0;
+ offsets.clear();
+ if(vector != null) vector.clear();
+ }
+
+ @Override
+ public MaterializedField getField() {
+ return field;
+ }
+
+ @Override
+ public TransferPair getTransferPair() {
+ return new RepeatedListTransferPair(field.getPath());
+ }
+
+
+ public class RepeatedListTransferPair implements TransferPair{
+ private final RepeatedListVector from = RepeatedListVector.this;
+ private final RepeatedListVector to;
+ private final TransferPair vectorTransfer;
+
+ private RepeatedListTransferPair(RepeatedListVector to){
+ this.to = to;
+ if(to.vector == null){
+ to.vector = to.addOrGet(null, vector.getField().getType(), vector.getClass());
+ to.vector.allocateNew();
+ }
+ this.vectorTransfer = vector.makeTransferPair(to.vector);
+ }
+
+ private RepeatedListTransferPair(SchemaPath path){
+ this.to = new RepeatedListVector(path, allocator);
+ vectorTransfer = vector.getTransferPair();
+ this.to.vector = vectorTransfer.getTo();
+ }
+
+ @Override
+ public void transfer() {
+ offsets.transferTo(to.offsets);
+ vectorTransfer.transfer();
+ to.valueCount = valueCount;
+ clear();
+ }
+
+ @Override
+ public ValueVector getTo() {
+ return to;
+ }
+
+ @Override
+ public void splitAndTransfer(int startIndex, int length) {
+ throw new UnsupportedOperationException();
+ }
+
+
+ @Override
+ public boolean copyValueSafe(int from, int to) {
+ RepeatedListHolder holder = new RepeatedListHolder();
+ accessor.get(from, holder);
+ int newIndex = this.to.offsets.getAccessor().get(to);
+ //todo: make this a bulk copy.
+ for(int i = holder.start; i < holder.end; i++, newIndex++){
+ if(!vectorTransfer.copyValueSafe(i, newIndex)) return false;
+ }
+ if(!this.to.offsets.getMutator().setSafe(to, newIndex)) return false;
+
+ return true;
+ }
+
+ }
+
+ @Override
+ public TransferPair makeTransferPair(ValueVector to) {
+ if(!(to instanceof RepeatedListVector ) ) throw new IllegalArgumentException("You can't make a transfer pair from an incompatible type.");
+ return new RepeatedListTransferPair( (RepeatedListVector) to);
+ }
+
+ @Override
+ public TransferPair getTransferPair(FieldReference ref) {
+ return new RepeatedListTransferPair(ref);
+ }
+
+ @Override
+ public int getValueCapacity() {
+ if(vector == null) return offsets.getValueCapacity() - 1;
+ return Math.min(offsets.getValueCapacity() - 1, vector.getValueCapacity());
+ }
+
+ @Override
+ public Accessor getAccessor() {
+ return accessor;
+ }
+
+ @Override
+ public ByteBuf[] getBuffers() {
+ return ArrayUtils.addAll(offsets.getBuffers(), vector.getBuffers());
+ }
+
+ private void setVector(ValueVector v){
+ field.addChild(v.getField());
+ this.vector = v;
+ }
+
+ @Override
+ public void load(SerializedField metadata, ByteBuf buf) {
+ SerializedField childField = metadata.getChildList().get(0);
+
+ int bufOffset = offsets.load(metadata.getValueCount()+1, buf);
+
+ MaterializedField fieldDef = MaterializedField.create(childField);
+ if(vector == null) {
+ setVector(TypeHelper.getNewVector(fieldDef, allocator));
+ }
+
+ if (childField.getValueCount() == 0){
+ vector.clear();
+ } else {
+ vector.load(childField, buf.slice(bufOffset, childField.getBufferLength()));
+ }
+ }
+
+ @Override
+ public SerializedField getMetadata() {
+ return getField() //
+ .getAsBuilder() //
+ .setBufferLength(getBufferSize()) //
+ .setValueCount(accessor.getValueCount()) //
+ .addChild(vector.getMetadata()) //
+ .build();
+ }
+
+ private void populateEmpties(int groupCount){
+ int previousEnd = offsets.getAccessor().get(lastSet + 1);
+ for(int i = lastSet + 2; i <= groupCount; i++){
+ offsets.getMutator().setSafe(i, previousEnd);
+ }
+ lastSet = groupCount - 1;
+ }
+
+ @Override
+ public Iterator<ValueVector> iterator() {
+ return Collections.singleton(vector).iterator();
+ }
+
+ @Override
+ public <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz) {
+ Preconditions.checkArgument(name == null);
+
+ if(vector == null){
+ vector = TypeHelper.getNewVector(MaterializedField.create(field.getPath().getUnindexedArrayChild(), type), allocator);
+ }
+ return typeify(vector, clazz);
+ }
+
+ @Override
+ public <T extends ValueVector> T get(String name, Class<T> clazz) {
+ if(name != null) return null;
+ return typeify(vector, clazz);
+ }
+
+ @Override
+ public void allocateNew(int parentValueCount, int childValueCount) {
+ clear();
+ offsets.allocateNew(parentValueCount+1);
+ mutator.reset();
+ accessor.reset();
+ }
+
+ @Override
+ public int load(int parentValueCount, int childValueCount, ByteBuf buf) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public VectorWithOrdinal getVectorWithOrdinal(String name) {
+ if(name != null) return null;
+ return new VectorWithOrdinal(vector, 0);
+ }
+
+
+}
[05/10] Add support for RepeatedMapVector,
MapVector and RepeatedListVector.
Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/expr/ExpressionTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/ExpressionTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/ExpressionTest.java
index 3fff0e4..c1452cd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/expr/ExpressionTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/expr/ExpressionTest.java
@@ -62,14 +62,14 @@ public class ExpressionTest extends ExecTest {
@Test
public void testSpecial(final @Injectable RecordBatch batch, @Injectable ValueVector vector) throws Exception {
- final TypedFieldId tfid = new TypedFieldId(Types.optional(MinorType.INT),0, false);
+ final TypedFieldId tfid = new TypedFieldId(Types.optional(MinorType.INT), false, 0);
new NonStrictExpectations() {
@NonStrict VectorWrapper<?> wrapper;
{
batch.getValueVectorId(new SchemaPath("alpha", ExpressionPosition.UNKNOWN));
result = tfid;
- batch.getValueAccessorById(tfid.getFieldId(), IntVector.class);
+ batch.getValueAccessorById(IntVector.class, tfid.getFieldIds());
result = wrapper;
wrapper.getValueVector();
result = new IntVector(null, null);
@@ -81,7 +81,7 @@ public class ExpressionTest extends ExecTest {
@Test
public void testSchemaExpression(final @Injectable RecordBatch batch) throws Exception {
- final TypedFieldId tfid = new TypedFieldId(Types.optional(MinorType.BIGINT), 0, false);
+ final TypedFieldId tfid = new TypedFieldId(Types.optional(MinorType.BIGINT), false, 0);
new Expectations() {
{
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
index 92c49b9..a2ba851 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/impl/TestDateFunctions.java
@@ -34,7 +34,7 @@ import org.junit.Test;
import java.util.List;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
public class TestDateFunctions extends PopUnitTestBase {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestDateFunctions.class);
@@ -62,7 +62,7 @@ public class TestDateFunctions extends PopUnitTestBase {
ValueVector.Accessor accessor = v.getValueVector().getAccessor();
System.out.println(accessor.getObject(0));
- assertTrue((accessor.getObject(0)).toString().equals(expectedResults[i++]));
+ assertEquals( expectedResults[i++], accessor.getObject(0).toString());
}
batchLoader.clear();
@@ -74,49 +74,49 @@ public class TestDateFunctions extends PopUnitTestBase {
@Test
public void testDateIntervalArithmetic() throws Exception {
- String expectedResults[] = {"2009-02-23",
- "2008-02-24",
- "13:20:33",
- "2008-02-24 12:00:00.0",
- "2009-04-23 12:00:00.0",
- "2008-02-24 12:00:00.0",
- "2009-04-23 12:00:00.0",
- "2009-02-23",
- "2008-02-24",
- "13:20:33",
- "2008-02-24 12:00:00.0",
- "2009-04-23 12:00:00.0",
- "2008-02-24 12:00:00.0",
- "2009-04-23 12:00:00.0"};
-
+ String expectedResults[] = {"2009-02-23T00:00:00.000-08:00",
+ "2008-02-24T00:00:00.000-08:00",
+ "1970-01-01T13:20:33.000-08:00",
+ "2008-02-24T12:00:00.000-08:00",
+ "2009-04-23T12:00:00.000-07:00",
+ "2008-02-24T12:00:00.000-08:00",
+ "2009-04-23T12:00:00.000-07:00",
+ "2009-02-23T00:00:00.000-08:00",
+ "2008-02-24T00:00:00.000-08:00",
+ "1970-01-01T13:20:33.000-08:00",
+ "2008-02-24T12:00:00.000-08:00",
+ "2009-04-23T12:00:00.000-07:00",
+ "2008-02-24T12:00:00.000-08:00",
+ "2009-04-23T12:00:00.000-07:00"};
testCommon(expectedResults, "/functions/date/date_interval_arithmetic.json", "/test_simple_date.json");
}
@Test
public void testDateDifferenceArithmetic() throws Exception {
- String[] expectedResults = {"365 days 0:0:0.0",
- "-366 days 0:-1:0.0",
- "0 days 3:0:0.0",
- "0 days 11:0:0.0"};
+ String[] expectedResults = {"P365D",
+ "P-366DT-60S",
+ "PT10800S",
+ "PT39600S"};
testCommon(expectedResults, "/functions/date/date_difference_arithmetic.json", "/test_simple_date.json");
}
@Test
public void testIntervalArithmetic() throws Exception {
- String[] expectedResults = {"2 years 2 months ",
- "2 days 1:2:3.0",
- "0 years 2 months ",
- "0 days 1:2:3.0",
- "2 years 4 months 0 days 0:0:0.0",
- "0 years 0 months 0 days 2:0:6.0",
- "0 years 7 months 0 days 0:0:0.0",
- "0 years 0 months 0 days 0:30:1.500",
- "2 years 9 months 18 days 0:0:0.0",
- "0 years 0 months 0 days 2:24:7.200",
- "0 years 6 months 19 days 23:59:59.999",
- "0 years 0 months 0 days 0:28:35.714"};
+ String expectedResults[] = {"P2Y2M",
+ "P2DT3723S",
+ "P2M",
+ "PT3723S",
+ "P28M",
+ "PT7206S",
+ "P7M",
+ "PT1801.500S",
+ "P33M18D",
+ "PT8647.200S",
+ "P6M19DT86399.999S",
+ "PT1715.714S"};
+
testCommon(expectedResults, "/functions/date/interval_arithmetic.json", "/test_simple_date.json");
}
@@ -132,10 +132,10 @@ public class TestDateFunctions extends PopUnitTestBase {
@Test
public void testToDateType() throws Exception {
- String expectedResults[] = {"2008-02-23",
- "12:20:30",
- "2008-02-23 12:00:00.0",
- "2008-02-23 12:00:00.0"};
+ String expectedResults[] = {"2008-02-23T00:00:00.000-08:00",
+ "1970-01-01T12:20:30.000-08:00",
+ "2008-02-23T12:00:00.000-08:00",
+ "2008-02-23T12:00:00.000-08:00"};
testCommon(expectedResults, "/functions/date/to_date_type.json", "/test_simple_date.json");
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/SimpleRootExec.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/SimpleRootExec.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/SimpleRootExec.java
index c3f68fd..62f9a21 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/SimpleRootExec.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/SimpleRootExec.java
@@ -63,7 +63,7 @@ public class SimpleRootExec implements RootExec, Iterable<ValueVector>{
@SuppressWarnings("unchecked")
public <T extends ValueVector> T getValueVectorById(SchemaPath path, Class<?> vvClass){
TypedFieldId tfid = incoming.getValueVectorId(path);
- return (T) incoming.getValueAccessorById(tfid.getFieldId(), vvClass).getValueVector();
+ return (T) incoming.getValueAccessorById(vvClass, tfid.getFieldIds()).getValueVector();
}
@Override
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestCastFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestCastFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestCastFunctions.java
index bcf7e69..402df63 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestCastFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestCastFunctions.java
@@ -451,7 +451,7 @@ public class TestCastFunctions extends PopUnitTestBase{
for (int i = 0; i<result.length; i++ ) {
for (int j = 0; j<result[0].length; j++) {
- assertEquals(String.format("Column %s at row %s have wrong result", j, i), result[i][j], expected[i][j]);
+ assertEquals(String.format("Column %s at row %s have wrong result", j, i), result[i][j].toString(), expected[i][j]);
}
}
batchLoader.clear();
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java
index 9a88912..d48f91f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java
@@ -25,9 +25,7 @@ import static org.junit.Assert.fail;
import io.netty.buffer.ByteBuf;
import java.text.DateFormat;
-import java.text.ParseException;
import java.text.SimpleDateFormat;
-import java.util.Date;
import java.util.List;
import mockit.Injectable;
@@ -36,6 +34,7 @@ import mockit.NonStrictExpectations;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.client.DrillClient;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
+import org.apache.drill.exec.expr.fn.impl.DateUtility;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.TopLevelAllocator;
import org.apache.drill.exec.ops.FragmentContext;
@@ -58,6 +57,7 @@ import org.apache.drill.exec.util.VectorUtil;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.vector.VarCharVector;
import org.junit.Test;
+import org.joda.time.DateTime;
import com.codahale.metrics.MetricRegistry;
import com.google.common.base.Charsets;
@@ -71,24 +71,12 @@ public class TestConvertFunctions extends PopUnitTestBase {
private static final float DELTA = (float) 0.0001;
- private static final DateFormat DATE_FORMAT;
- private static final DateFormat DATE_TIME_FORMAT;
-
// "1980-01-01 01:23:45.678"
private static final String DATE_TIME_BE = "\\x00\\x00\\x00\\x49\\x77\\x85\\x1f\\x8e";
private static final String DATE_TIME_LE = "\\x8e\\x1f\\x85\\x77\\x49\\x00\\x00\\x00";
- private static Date time = null;
- private static Date date = null;
-
- static {
- DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd");
- DATE_TIME_FORMAT = new SimpleDateFormat("HH:mm:ss.SSS");
- try {
- time = DATE_TIME_FORMAT.parse("01:23:45.678"); // 5025678
- date = DATE_FORMAT.parse("1980-01-01"); // 0x4977387000
- } catch (ParseException e) { }
- }
+ private static DateTime time = DateTime.parse("01:23:45.678", DateUtility.getTimeFormatter());
+ private static DateTime date = DateTime.parse("1980-01-01", DateUtility.getDateTimeFormatter());
DrillConfig c = DrillConfig.create();
PhysicalPlanReader reader;
@@ -99,7 +87,7 @@ public class TestConvertFunctions extends PopUnitTestBase {
@Test
public void testDateTime1(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection) throws Throwable {
- runTest(bitContext, connection, "convert_from(binary_string('" + DATE_TIME_BE + "'), 'TIME_EPOCH_BE')", time);
+ runTest(bitContext, connection, "(convert_from(binary_string('" + DATE_TIME_BE + "'), 'TIME_EPOCH_BE'))", time);
}
@Test
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java
index 48600a0..79f7b24 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestDecimal.java
@@ -80,8 +80,8 @@ public class TestDecimal extends PopUnitTestBase{
for (int i = 0; i < dec9Accessor.getValueCount(); i++) {
- assertEquals(dec9Accessor.getObject(i), decimal9Output[i]);
- assertEquals(dec18Accessor.getObject(i), decimal18Output[i]);
+ assertEquals(dec9Accessor.getObject(i).toString(), decimal9Output[i]);
+ assertEquals(dec18Accessor.getObject(i).toString(), decimal18Output[i]);
}
assertEquals(6, dec9Accessor.getValueCount());
assertEquals(6, dec18Accessor.getValueCount());
@@ -161,9 +161,9 @@ public class TestDecimal extends PopUnitTestBase{
ValueVector.Accessor mulAccessor = itr.next().getValueVector().getAccessor();
for (int i = 0; i < addAccessor.getValueCount(); i++) {
- assertEquals(addAccessor.getObject(i), addOutput[i]);
- assertEquals(subAccessor.getObject(i), subtractOutput[i]);
- assertEquals(mulAccessor.getObject(i), multiplyOutput[i]);
+ assertEquals(addAccessor.getObject(i).toString(), addOutput[i]);
+ assertEquals(subAccessor.getObject(i).toString(), subtractOutput[i]);
+ assertEquals(mulAccessor.getObject(i).toString(), multiplyOutput[i]);
}
assertEquals(6, addAccessor.getValueCount());
@@ -206,9 +206,9 @@ public class TestDecimal extends PopUnitTestBase{
ValueVector.Accessor mulAccessor = itr.next().getValueVector().getAccessor();
for (int i = 0; i < addAccessor.getValueCount(); i++) {
- assertEquals(addAccessor.getObject(i), addOutput[i]);
- assertEquals(subAccessor.getObject(i), subtractOutput[i]);
- assertEquals(mulAccessor.getObject(i), multiplyOutput[i]);
+ assertEquals(addAccessor.getObject(i).toString(), addOutput[i]);
+ assertEquals(subAccessor.getObject(i).toString(), subtractOutput[i]);
+ assertEquals(mulAccessor.getObject(i).toString(), multiplyOutput[i]);
}
assertEquals(7, addAccessor.getValueCount());
assertEquals(7, subAccessor.getValueCount());
@@ -255,7 +255,7 @@ public class TestDecimal extends PopUnitTestBase{
ValueVector.Accessor accessor = v.getValueVector().getAccessor();
for (int i = 0; i < accessor.getValueCount(); i++) {
- assertEquals(sortOutput[i], accessor.getObject(i));
+ assertEquals(sortOutput[i], accessor.getObject(i).toString());
}
assertEquals(10, accessor.getValueCount());
}
@@ -293,7 +293,7 @@ public class TestDecimal extends PopUnitTestBase{
ValueVector.Accessor accessor = v.getValueVector().getAccessor();
for (int i = 0; i < accessor.getValueCount(); i++) {
- assertEquals(accessor.getObject(i), sortOutput[i]);
+ assertEquals(accessor.getObject(i).toString(), sortOutput[i]);
}
assertEquals(7, accessor.getValueCount());
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestExtractFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestExtractFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestExtractFunctions.java
index 74f1698..a4f10d0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestExtractFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestExtractFunctions.java
@@ -126,7 +126,7 @@ public class TestExtractFunctions extends PopUnitTestBase {
for(int i=0; i<expectedValues.length; i++) {
for(int j=0; j<expectedValues[i].length; j++) {
NullableBigIntVector vv =
- (NullableBigIntVector) batchLoader.getValueAccessorById(j, NullableBigIntVector.class).getValueVector();
+ (NullableBigIntVector) batchLoader.getValueAccessorById(NullableBigIntVector.class, j).getValueVector();
System.out.println("["+i+"]["+j+"]: Expected: " + expectedValues[i][j] + ", Actual: " + vv.getAccessor().get(i));
assertEquals(expectedValues[i][j], vv.getAccessor().get(i));
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestImplicitCastFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestImplicitCastFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestImplicitCastFunctions.java
index 4a849d1..adfd9bc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestImplicitCastFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestImplicitCastFunctions.java
@@ -135,8 +135,8 @@ public class TestImplicitCastFunctions extends ExecTest {
Object [] expected = new Object[5];
expected [0] = new Integer (0);
expected [1] = new Integer (0);
- expected [2] = new Float (-4.2949673E9);
- expected [3] = new Float (-4.2949673E9);
+ expected [2] = new Float (-2.14748365E9);
+ expected [3] = new Float (-2.14748365E9);
expected [4] = new Double (-9.223372036854776E18);
runTest(bitContext, connection, expected, "functions/cast/testICastMockCol.json");
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestReverseImplicitCast.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestReverseImplicitCast.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestReverseImplicitCast.java
index a0b77a5..9285501 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestReverseImplicitCast.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestReverseImplicitCast.java
@@ -71,7 +71,7 @@ public class TestReverseImplicitCast extends PopUnitTestBase {
System.out.println(intAccessor1.getObject(i));
assertEquals(intAccessor1.getObject(i), 10);
System.out.println(varcharAccessor1.getObject(i));
- assertEquals(varcharAccessor1.getObject(i), "101");
+ assertEquals(varcharAccessor1.getObject(i).toString(), "101");
}
}
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestSimpleTopN.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestSimpleTopN.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestSimpleTopN.java
index 25b62ad..7ff165b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestSimpleTopN.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TopN/TestSimpleTopN.java
@@ -92,7 +92,7 @@ public class TestSimpleTopN extends PopUnitTestBase {
batchCount++;
RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
loader.load(b.getHeader().getDef(),b.getData());
- BigIntVector c1 = (BigIntVector) loader.getValueAccessorById(loader.getValueVectorId(new SchemaPath("blue", ExpressionPosition.UNKNOWN)).getFieldId(), BigIntVector.class).getValueVector();
+ BigIntVector c1 = (BigIntVector) loader.getValueAccessorById(BigIntVector.class, loader.getValueVectorId(new SchemaPath("blue", ExpressionPosition.UNKNOWN)).getFieldIds()).getValueVector();
BigIntVector.Accessor a1 = c1.getAccessor();
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestAgg.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestAgg.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestAgg.java
index 927319a..ecfc10a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestAgg.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/agg/TestAgg.java
@@ -81,8 +81,8 @@ public class TestAgg extends ExecTest {
int[] keyArr = {Integer.MIN_VALUE, Integer.MAX_VALUE};
for(int i =0; i < exec.getRecordCount(); i++){
- assertEquals(cntArr[i], cnt.getAccessor().getObject(i));
- assertEquals(keyArr[i], key.getAccessor().getObject(i));
+ assertEquals((Long) cntArr[i], cnt.getAccessor().getObject(i));
+ assertEquals((Integer) keyArr[i], key.getAccessor().getObject(i));
}
}
@@ -102,7 +102,7 @@ public class TestAgg extends ExecTest {
BigIntVector key2 = exec.getValueVectorById(SchemaPath.getSimplePath("key2"), BigIntVector.class);
BigIntVector cnt = exec.getValueVectorById(SchemaPath.getSimplePath("cnt"), BigIntVector.class);
BigIntVector total = exec.getValueVectorById(SchemaPath.getSimplePath("total"), BigIntVector.class);
- int[] keyArr1 = {Integer.MIN_VALUE, Integer.MIN_VALUE, Integer.MIN_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE};
+ Integer[] keyArr1 = {Integer.MIN_VALUE, Integer.MIN_VALUE, Integer.MIN_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE};
long[] keyArr2 = {0,1,2,0,1,2};
long[] cntArr = {34,34,34,34,34,34};
long[] totalArr = {0,34,68,0,34,68};
@@ -116,10 +116,10 @@ public class TestAgg extends ExecTest {
// System.out.print("\t");
// System.out.print(total.getAccessor().getObject(i));
// System.out.println();
- assertEquals(cntArr[i], cnt.getAccessor().getObject(i));
+ assertEquals((Long) cntArr[i], cnt.getAccessor().getObject(i));
assertEquals(keyArr1[i], key1.getAccessor().getObject(i));
- assertEquals(keyArr2[i], key2.getAccessor().getObject(i));
- assertEquals(totalArr[i], total.getAccessor().getObject(i));
+ assertEquals((Long) keyArr2[i], key2.getAccessor().getObject(i));
+ assertEquals((Long) totalArr[i], total.getAccessor().getObject(i));
}
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
index 06d8a32..25af66d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java
@@ -68,6 +68,7 @@ public class TestMergeJoin extends PopUnitTestBase {
DrillConfig c = DrillConfig.create();
@Test
+ @Ignore // this doesn't have a sort. it also causes an infinite loop. these may or may not be related.
public void simpleEqualityJoin(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection) throws Throwable {
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java
index 699f075..136b7c1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java
@@ -81,12 +81,12 @@ public class TestOrderedPartitionExchange extends PopUnitTestBase {
count += rows;
RecordBatchLoader loader = new RecordBatchLoader(new BootStrapContext(DrillConfig.create()).getAllocator());
loader.load(b.getHeader().getDef(), b.getData());
- BigIntVector vv1 = (BigIntVector)loader.getValueAccessorById(loader.getValueVectorId(
- new SchemaPath("col1", ExpressionPosition.UNKNOWN)).getFieldId(), BigIntVector.class).getValueVector();
- Float8Vector vv2 = (Float8Vector)loader.getValueAccessorById(loader.getValueVectorId(
- new SchemaPath("col2", ExpressionPosition.UNKNOWN)).getFieldId(), Float8Vector.class).getValueVector();
- IntVector pVector = (IntVector)loader.getValueAccessorById(loader.getValueVectorId(
- new SchemaPath("partition", ExpressionPosition.UNKNOWN)).getFieldId(), IntVector.class).getValueVector();
+ BigIntVector vv1 = (BigIntVector)loader.getValueAccessorById(BigIntVector.class, loader.getValueVectorId(
+ new SchemaPath("col1", ExpressionPosition.UNKNOWN)).getFieldIds()).getValueVector();
+ Float8Vector vv2 = (Float8Vector)loader.getValueAccessorById(Float8Vector.class, loader.getValueVectorId(
+ new SchemaPath("col2", ExpressionPosition.UNKNOWN)).getFieldIds()).getValueVector();
+ IntVector pVector = (IntVector)loader.getValueAccessorById(IntVector.class, loader.getValueVectorId(
+ new SchemaPath("partition", ExpressionPosition.UNKNOWN)).getFieldIds()).getValueVector();
long previous1 = Long.MIN_VALUE;
double previous2 = Double.MIN_VALUE;
int partPrevious = -1;
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
index 6ddb73d..c8261aa 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
@@ -66,8 +66,8 @@ public class TestWriter extends BaseTestQuery {
QueryResultBatch batch = results.get(0);
assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
- VarCharVector fragmentIdV = (VarCharVector) batchLoader.getValueAccessorById(0, VarCharVector.class).getValueVector();
- BigIntVector recordWrittenV = (BigIntVector) batchLoader.getValueAccessorById(1, BigIntVector.class).getValueVector();
+ VarCharVector fragmentIdV = (VarCharVector) batchLoader.getValueAccessorById(VarCharVector.class, 0).getValueVector();
+ BigIntVector recordWrittenV = (BigIntVector) batchLoader.getValueAccessorById(BigIntVector.class, 1).getValueVector();
// expected only one row in output
assertEquals(1, batchLoader.getRecordCount());
@@ -137,7 +137,7 @@ public class TestWriter extends BaseTestQuery {
if (batchLoader.getRecordCount() <= 0)
continue;
- BigIntVector recordWrittenV = (BigIntVector) batchLoader.getValueAccessorById(1, BigIntVector.class).getValueVector();
+ BigIntVector recordWrittenV = (BigIntVector) batchLoader.getValueAccessorById(BigIntVector.class, 1).getValueVector();
for (int i = 0; i < batchLoader.getRecordCount(); i++) {
recordsWritten += recordWrittenV.getAccessor().get(i);
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java
index 5de0ad7..df123e7 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java
@@ -74,7 +74,7 @@ public class TestSimpleExternalSort extends PopUnitTestBase {
batchCount++;
RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
loader.load(b.getHeader().getDef(),b.getData());
- BigIntVector c1 = (BigIntVector) loader.getValueAccessorById(loader.getValueVectorId(new SchemaPath("blue", ExpressionPosition.UNKNOWN)).getFieldId(), BigIntVector.class).getValueVector();
+ BigIntVector c1 = (BigIntVector) loader.getValueAccessorById(BigIntVector.class, loader.getValueVectorId(new SchemaPath("blue", ExpressionPosition.UNKNOWN)).getFieldIds()).getValueVector();
BigIntVector.Accessor a1 = c1.getAccessor();
@@ -127,7 +127,7 @@ public class TestSimpleExternalSort extends PopUnitTestBase {
batchCount++;
RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
loader.load(b.getHeader().getDef(),b.getData());
- BigIntVector c1 = (BigIntVector) loader.getValueAccessorById(loader.getValueVectorId(new SchemaPath("blue", ExpressionPosition.UNKNOWN)).getFieldId(), BigIntVector.class).getValueVector();
+ BigIntVector c1 = (BigIntVector) loader.getValueAccessorById(BigIntVector.class, loader.getValueVectorId(new SchemaPath("blue", ExpressionPosition.UNKNOWN)).getFieldIds()).getValueVector();
BigIntVector.Accessor a1 = c1.getAccessor();
@@ -177,7 +177,7 @@ public class TestSimpleExternalSort extends PopUnitTestBase {
batchCount++;
RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
loader.load(b.getHeader().getDef(),b.getData());
- BigIntVector c1 = (BigIntVector) loader.getValueAccessorById(loader.getValueVectorId(new SchemaPath("blue", ExpressionPosition.UNKNOWN)).getFieldId(), BigIntVector.class).getValueVector();
+ BigIntVector c1 = (BigIntVector) loader.getValueAccessorById(BigIntVector.class, loader.getValueVectorId(new SchemaPath("blue", ExpressionPosition.UNKNOWN)).getFieldIds()).getValueVector();
BigIntVector.Accessor a1 = c1.getAccessor();
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java
index 78fb1c1..8a31a27 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/ExpressionTreeMaterializerTest.java
@@ -41,8 +41,6 @@ import org.apache.drill.exec.ExecTest;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
-import org.apache.drill.exec.proto.SchemaDefProtos.FieldDef;
-import org.apache.drill.exec.proto.SchemaDefProtos.NamePart;
import org.junit.Test;
import com.google.common.collect.ImmutableList;
@@ -60,8 +58,7 @@ public class ExpressionTreeMaterializerTest extends ExecTest {
FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
private MaterializedField getField(int fieldId, String name, MajorType type) {
- return new MaterializedField(FieldDef.newBuilder().setMajorType(type).addName(NamePart.newBuilder().setName(name))
- .build());
+ return MaterializedField.create(SchemaPath.getSimplePath(name), type);
}
@Test
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java
index 5d6a37f..661cf67 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java
@@ -93,9 +93,9 @@ public class TestDateTypes extends PopUnitTestBase {
ValueVector.Accessor accessor = v.getValueVector().getAccessor();
- assertTrue((accessor.getObject(0)).equals("1970-01-02"));
- assertTrue((accessor.getObject(1)).equals("2008-12-28"));
- assertTrue((accessor.getObject(2)).equals("2000-02-27"));
+ assertEquals((accessor.getObject(0).toString()), ("1970-01-02"));
+ assertEquals((accessor.getObject(1).toString()), ("2008-12-28"));
+ assertEquals((accessor.getObject(2).toString()), ("2000-02-27"));
}
batchLoader.clear();
@@ -127,9 +127,9 @@ public class TestDateTypes extends PopUnitTestBase {
ValueVector.Accessor accessor = v.getValueVector().getAccessor();
- assertEquals((accessor.getObject(0)), new String("1970-01-02"));
- assertEquals((accessor.getObject(1)), new String("2000-02-27"));
- assertEquals((accessor.getObject(2)), new String("2008-12-28"));
+ assertEquals((accessor.getObject(0).toString()), new String("1970-01-02"));
+ assertEquals((accessor.getObject(1).toString()), new String("2000-02-27"));
+ assertEquals((accessor.getObject(2).toString()), new String("2008-12-28"));
}
batchLoader.clear();
@@ -161,9 +161,9 @@ public class TestDateTypes extends PopUnitTestBase {
ValueVector.Accessor accessor = v.getValueVector().getAccessor();
- assertTrue(accessor.getObject(0).equals("1970-01-02 10:20:33.000"));
- assertTrue((accessor.getObject(1)).equals("2008-12-28 11:34:00.129"));
- assertTrue((accessor.getObject(2)).equals("2000-02-27 14:24:00.000"));
+ assertEquals(accessor.getObject(0).toString() ,"1970-01-02 10:20:33.000");
+ assertEquals(accessor.getObject(1).toString() ,"2008-12-28 11:34:00.129");
+ assertEquals(accessor.getObject(2).toString(), "2000-02-27 14:24:00.000");
}
batchLoader.clear();
@@ -196,32 +196,33 @@ public class TestDateTypes extends PopUnitTestBase {
ValueVector.Accessor accessor = itr.next().getValueVector().getAccessor();
// Check the interval type
- assertTrue((accessor.getObject(0)).equals("2 years 2 months 1 day 1:20:35.0"));
- assertTrue((accessor.getObject(1)).equals("2 years 2 months 0 days 0:0:0.0"));
- assertTrue((accessor.getObject(2)).equals("0 years 0 months 0 days 1:20:35.0"));
- assertTrue((accessor.getObject(3)).equals("2 years 2 months 1 day 1:20:35.897"));
- assertTrue((accessor.getObject(4)).equals("0 years 0 months 0 days 0:0:35.4"));
- assertTrue((accessor.getObject(5)).equals("1 year 10 months 1 day 0:-39:-25.0"));
+ assertEquals((accessor.getObject(0).toString()), ("2 years 2 months 1 day 1:20:35.0"));
+ assertEquals((accessor.getObject(1).toString()), ("2 years 2 months 0 days 0:0:0.0"));
+ assertEquals((accessor.getObject(2).toString()), ("0 years 0 months 0 days 1:20:35.0"));
+ assertEquals((accessor.getObject(3).toString()),("2 years 2 months 1 day 1:20:35.897"));
+ assertEquals((accessor.getObject(4).toString()), ("0 years 0 months 0 days 0:0:35.4"));
+ assertEquals((accessor.getObject(5).toString()), ("1 year 10 months 1 day 0:-39:-25.0"));
accessor = itr.next().getValueVector().getAccessor();
// Check the interval year type
- assertTrue((accessor.getObject(0)).equals("2 years 2 months "));
- assertTrue((accessor.getObject(1)).equals("2 years 2 months "));
- assertTrue((accessor.getObject(2)).equals("0 years 0 months "));
- assertTrue((accessor.getObject(3)).equals("2 years 2 months "));
- assertTrue((accessor.getObject(4)).equals("0 years 0 months "));
- assertTrue((accessor.getObject(5)).equals("1 year 10 months "));
+ assertEquals((accessor.getObject(0).toString()), ("2 years 2 months "));
+ assertEquals((accessor.getObject(1).toString()), ("2 years 2 months "));
+ assertEquals((accessor.getObject(2).toString()), ("0 years 0 months "));
+ assertEquals((accessor.getObject(3).toString()), ("2 years 2 months "));
+ assertEquals((accessor.getObject(4).toString()), ("0 years 0 months "));
+ assertEquals((accessor.getObject(5).toString()), ("1 year 10 months "));
+
accessor = itr.next().getValueVector().getAccessor();
// Check the interval day type
- assertTrue((accessor.getObject(0)).equals("1 day 1:20:35.0"));
- assertTrue((accessor.getObject(1)).equals("0 days 0:0:0.0"));
- assertTrue((accessor.getObject(2)).equals("0 days 1:20:35.0"));
- assertTrue((accessor.getObject(3)).equals("1 day 1:20:35.897"));
- assertTrue((accessor.getObject(4)).equals("0 days 0:0:35.4"));
- assertTrue((accessor.getObject(5)).equals("1 day 0:-39:-25.0"));
+ assertEquals((accessor.getObject(0).toString()), ("1 day 1:20:35.0"));
+ assertEquals((accessor.getObject(1).toString()), ("0 days 0:0:0.0"));
+ assertEquals((accessor.getObject(2).toString()), ("0 days 1:20:35.0"));
+ assertEquals((accessor.getObject(3).toString()), ("1 day 1:20:35.897"));
+ assertEquals((accessor.getObject(4).toString()), ("0 days 0:0:35.4"));
+ assertEquals((accessor.getObject(5).toString()), ("1 day 0:-39:-25.0"));
batchLoader.clear();
for(QueryResultBatch b : results){
@@ -260,7 +261,7 @@ public class TestDateTypes extends PopUnitTestBase {
ValueVector.Accessor accessor = v.getValueVector().getAccessor();
- assertTrue((accessor.getObject(0)).equals(result[idx++]));
+ assertEquals((accessor.getObject(0).toString()), (result[idx++]));
}
batchLoader.clear();
@@ -292,7 +293,8 @@ public class TestDateTypes extends PopUnitTestBase {
ValueVector.Accessor accessor = v.getValueVector().getAccessor();
- assertTrue((accessor.getObject(0)).equals("2008-03-27"));
+ assertEquals((accessor.getObject(0).toString()), ("2008-03-27"));
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestValueVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestValueVector.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestValueVector.java
index 788d7f1..6dc87e9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestValueVector.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestValueVector.java
@@ -21,13 +21,14 @@ import static org.junit.Assert.assertEquals;
import java.nio.charset.Charset;
+import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.ExecTest;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.memory.TopLevelAllocator;
-import org.apache.drill.exec.proto.SchemaDefProtos.FieldDef;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.vector.BitVector;
import org.apache.drill.exec.vector.NullableFloat4Vector;
@@ -44,14 +45,11 @@ public class TestValueVector extends ExecTest {
public void testFixedType() {
// Build a required uint field definition
MajorType.Builder typeBuilder = MajorType.newBuilder();
- FieldDef.Builder defBuilder = FieldDef.newBuilder();
typeBuilder
.setMinorType(MinorType.UINT4)
.setMode(DataMode.REQUIRED)
.setWidth(4);
- defBuilder
- .setMajorType(typeBuilder.build());
- MaterializedField field = MaterializedField.create(defBuilder.build());
+ MaterializedField field = MaterializedField.create(SchemaPath.getSimplePath(""), typeBuilder.build());
// Create a new value vector for 1024 integers
UInt4Vector v = new UInt4Vector(field, allocator);
@@ -78,7 +76,7 @@ public class TestValueVector extends ExecTest {
public void testNullableVarLen2() {
// Build an optional varchar field definition
MajorType.Builder typeBuilder = MajorType.newBuilder();
- FieldDef.Builder defBuilder = FieldDef.newBuilder();
+ SerializedField.Builder defBuilder = SerializedField.newBuilder();
typeBuilder
.setMinorType(MinorType.VARCHAR)
.setMode(DataMode.OPTIONAL)
@@ -124,7 +122,7 @@ public class TestValueVector extends ExecTest {
public void testNullableFixedType() {
// Build an optional uint field definition
MajorType.Builder typeBuilder = MajorType.newBuilder();
- FieldDef.Builder defBuilder = FieldDef.newBuilder();
+ SerializedField.Builder defBuilder = SerializedField.newBuilder();
typeBuilder
.setMinorType(MinorType.UINT4)
.setMode(DataMode.OPTIONAL)
@@ -211,7 +209,7 @@ public class TestValueVector extends ExecTest {
public void testNullableFloat() {
// Build an optional float field definition
MajorType.Builder typeBuilder = MajorType.newBuilder();
- FieldDef.Builder defBuilder = FieldDef.newBuilder();
+ SerializedField.Builder defBuilder = SerializedField.newBuilder();
typeBuilder
.setMinorType(MinorType.FLOAT4)
.setMode(DataMode.OPTIONAL)
@@ -270,7 +268,7 @@ public class TestValueVector extends ExecTest {
public void testBitVector() {
// Build a required boolean field definition
MajorType.Builder typeBuilder = MajorType.newBuilder();
- FieldDef.Builder defBuilder = FieldDef.newBuilder();
+ SerializedField.Builder defBuilder = SerializedField.newBuilder();
typeBuilder
.setMinorType(MinorType.BIT)
.setMode(DataMode.REQUIRED)
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/JSONRecordReaderTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/JSONRecordReaderTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/JSONRecordReaderTest.java
deleted file mode 100644
index 9887536..0000000
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/JSONRecordReaderTest.java
+++ /dev/null
@@ -1,395 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.store.json;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import com.google.common.base.Predicate;
-import com.google.common.collect.Iterables;
-
-import mockit.Expectations;
-import mockit.Injectable;
-
-import org.apache.drill.common.exceptions.ExecutionSetupException;
-import org.apache.drill.common.expression.ExpressionPosition;
-import org.apache.drill.common.expression.FieldReference;
-import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.types.TypeProtos.MinorType;
-import org.apache.drill.common.util.FileUtils;
-import org.apache.drill.exec.ExecTest;
-import org.apache.drill.exec.exception.SchemaChangeException;
-import org.apache.drill.exec.expr.TypeHelper;
-import org.apache.drill.exec.memory.BufferAllocator;
-import org.apache.drill.exec.memory.TopLevelAllocator;
-import org.apache.drill.exec.ops.FragmentContext;
-import org.apache.drill.exec.physical.impl.OutputMutator;
-import org.apache.drill.exec.proto.SchemaDefProtos;
-import org.apache.drill.exec.proto.UserBitShared;
-import org.apache.drill.exec.record.MaterializedField;
-import org.apache.drill.exec.store.easy.json.JSONRecordReader;
-import org.apache.drill.exec.vector.ValueVector;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.junit.*;
-
-import com.google.common.collect.Lists;
-
-
-public class JSONRecordReaderTest extends ExecTest {
- private static final Charset UTF_8 = Charset.forName("UTF-8");
-
- private static MockOutputMutator mutator = new MockOutputMutator();
-
- private String getResource(String resourceName) {
- return "resource:" + resourceName;
- }
-
- @After
- public void setup() {
- for (ValueVector v: mutator.getAddFields()) {
- v.clear();
- }
- mutator.removeAllFields();
- mutator.removedFields.clear();
- }
- @AfterClass
- public static void cleanup() {
- mutator.close();
- }
-
- static class MockOutputMutator implements OutputMutator {
- public List<MaterializedField> removedFields = Lists.newArrayList();
- List<ValueVector> addFields = Lists.newArrayList();
- private BufferAllocator allocator = new TopLevelAllocator();
-
- @Override
- public void removeField(MaterializedField field) throws SchemaChangeException {
- removedFields.add(field);
- }
-
- @Override
- public void addField(ValueVector vector) throws SchemaChangeException {
- addFields.add(vector);
- }
-
- @Override
- public void removeAllFields() {
- addFields.clear();
- }
-
- @Override
- public void setNewSchema() throws SchemaChangeException {
- }
-
- List<MaterializedField> getRemovedFields() {
- return removedFields;
- }
-
- List<ValueVector> getAddFields() {
- return addFields;
- }
-
- @Override
- public <T extends ValueVector> T addField(MaterializedField field, Class<T> clazz) throws SchemaChangeException {
- ValueVector v = TypeHelper.getNewVector(field, allocator);
- if(!clazz.isAssignableFrom(v.getClass())) throw new SchemaChangeException(String.format("The class that was provided %s does not correspond to the expected vector type of %s.", clazz.getSimpleName(), v.getClass().getSimpleName()));
- addField(v);
- return (T) v;
- }
-
- public void close() {
- allocator.close();
- }
- }
-
- private <T> void assertField(ValueVector valueVector, int index, MinorType expectedMinorType, T value, String name) {
- UserBitShared.FieldMetadata metadata = valueVector.getMetadata();
- SchemaDefProtos.FieldDef def = metadata.getDef();
- assertEquals(expectedMinorType, def.getMajorType().getMinorType());
- String[] parts = name.split("\\.");
- long expected = parts.length;
- assertEquals(expected, def.getNameList().size());
- for(int i = 0; i < parts.length; ++i) {
- assertEquals(parts[i], def.getName(i).getName());
- }
-
- if (expectedMinorType == MinorType.MAP) {
- return;
- }
-
- T val = (T) valueVector.getAccessor().getObject(index);
- assertValue(value, val);
- }
-
- private void assertValue(Object expected, Object found) {
- if (found instanceof byte[]) {
- assertTrue(Arrays.equals((byte[]) expected, (byte[]) found));
- } else if(found instanceof ArrayList) {
- List expectedArray = (List) expected;
- List foundArray = (List) found;
- assertEquals(expectedArray.size(), foundArray.size());
- for(int i = 0; i < expectedArray.size(); ++i) {
- assertValue(expectedArray.get(i), foundArray.get(i));
- }
- } else {
- assertEquals(expected, found);
- }
- }
-
- @Test
- public void testSameSchemaInSameBatch(@Injectable final FragmentContext context) throws IOException,
- ExecutionSetupException {
- JSONRecordReader jr = new JSONRecordReader(context,
- FileUtils.getResourceAsFile("/scan_json_test_1.json").toURI().toString(),
- FileSystem.getLocal(new Configuration()), null);
-
- List<ValueVector> addFields = mutator.getAddFields();
- jr.setup(mutator);
- assertEquals(2, jr.next());
- assertEquals(3, addFields.size());
- assertField(addFields.get(0), 0, MinorType.BIGINT, 123L, "test");
- assertField(addFields.get(1), 0, MinorType.BIT, true, "b");
- assertField(addFields.get(2), 0, MinorType.VARCHAR, "hi!", "c");
- assertField(addFields.get(0), 1, MinorType.BIGINT, 1234L, "test");
- assertField(addFields.get(1), 1, MinorType.BIT, false, "b");
- assertField(addFields.get(2), 1, MinorType.VARCHAR, "drill!", "c");
-
- assertEquals(0, jr.next());
- assertTrue(mutator.getRemovedFields().isEmpty());
- }
-
- @Test
- public void testChangedSchemaInSameBatch(@Injectable final FragmentContext context) throws IOException,
- ExecutionSetupException {
-
- JSONRecordReader jr = new JSONRecordReader(context,
- FileUtils.getResourceAsFile("/scan_json_test_2.json").toURI().toString(),
- FileSystem.getLocal(new Configuration()), null);
-
- List<ValueVector> addFields = mutator.getAddFields();
-
- jr.setup(mutator);
- assertEquals(3, jr.next());
- assertEquals(7, addFields.size());
- assertField(addFields.get(0), 0, MinorType.BIGINT, 123L, "test");
- assertField(addFields.get(1), 0, MinorType.BIGINT, 1L, "b");
- assertField(addFields.get(2), 0, MinorType.FLOAT4, (float) 2.15, "c");
- assertField(addFields.get(3), 0, MinorType.BIT, true, "bool");
- assertField(addFields.get(4), 0, MinorType.VARCHAR, "test1", "str1");
-
- assertField(addFields.get(0), 1, MinorType.BIGINT, 1234L, "test");
- assertField(addFields.get(1), 1, MinorType.BIGINT, 3L, "b");
- assertField(addFields.get(3), 1, MinorType.BIT, false, "bool");
- assertField(addFields.get(4), 1, MinorType.VARCHAR, "test2", "str1");
- assertField(addFields.get(5), 1, MinorType.BIGINT, 4L, "d");
-
- assertField(addFields.get(0), 2, MinorType.BIGINT, 12345L, "test");
- assertField(addFields.get(2), 2, MinorType.FLOAT4, (float) 5.16, "c");
- assertField(addFields.get(3), 2, MinorType.BIT, true, "bool");
- assertField(addFields.get(5), 2, MinorType.BIGINT, 6L, "d");
- assertField(addFields.get(6), 2, MinorType.VARCHAR, "test3", "str2");
- assertTrue(mutator.getRemovedFields().isEmpty());
- assertEquals(0, jr.next());
- }
-
- @Test
- public void testChangedSchemaInTwoBatchesColumnSelect(@Injectable final FragmentContext context) throws IOException,
- ExecutionSetupException {
-
- JSONRecordReader jr = new JSONRecordReader(context,
- FileUtils.getResourceAsFile("/scan_json_test_2.json").toURI().toString(),
- FileSystem.getLocal(new Configuration()),
- 64, Arrays.asList(new SchemaPath("test", ExpressionPosition.UNKNOWN))); // batch only fits 1 int
- List<ValueVector> addFields = mutator.getAddFields();
- List<MaterializedField> removedFields = mutator.getRemovedFields();
-
- jr.setup(mutator);
- assertEquals(1, jr.next());
- assertField(addFields.get(0), 0, MinorType.BIGINT, 123L, "test");
- assertTrue(removedFields.isEmpty());
- assertEquals(addFields.size(), 1);
- assertEquals(1, jr.next());
- assertField(addFields.get(0), 0, MinorType.BIGINT, 1234L, "test");
- assertEquals(addFields.size(), 1);
- assertTrue(removedFields.isEmpty());
- removedFields.clear();
- assertEquals(1, jr.next());
- assertField(addFields.get(0), 0, MinorType.BIGINT, 12345L, "test");
- assertEquals(addFields.size(), 1);
- assertTrue(removedFields.isEmpty());
- assertEquals(0, jr.next());
- }
-
- @Test
- public void testChangedSchemaInTwoBatches(@Injectable final FragmentContext context) throws IOException,
- ExecutionSetupException {
-
- JSONRecordReader jr = new JSONRecordReader(context,
- FileUtils.getResourceAsFile("/scan_json_test_2.json").toURI().toString(),
- FileSystem.getLocal(new Configuration()),
- 64, null); // batch only fits 1 int
- List<ValueVector> addFields = mutator.getAddFields();
- List<MaterializedField> removedFields = mutator.getRemovedFields();
-
- jr.setup(mutator);
- assertEquals(1, jr.next());
- assertEquals(5, addFields.size());
- assertField(addFields.get(0), 0, MinorType.BIGINT, 123L, "test");
- assertField(addFields.get(1), 0, MinorType.BIGINT, 1L, "b");
- assertField(addFields.get(2), 0, MinorType.FLOAT4, (float) 2.15, "c");
- assertField(addFields.get(3), 0, MinorType.BIT, true, "bool");
- assertField(addFields.get(4), 0, MinorType.VARCHAR, "test1", "str1");
- assertTrue(removedFields.isEmpty());
- assertEquals(1, jr.next());
- assertEquals(6, addFields.size());
- assertField(addFields.get(0), 0, MinorType.BIGINT, 1234L, "test");
- assertField(addFields.get(1), 0, MinorType.BIGINT, 3L, "b");
- assertField(addFields.get(3), 0, MinorType.BIT, false, "bool");
- assertField(addFields.get(4), 0, MinorType.VARCHAR, "test2", "str1");
- assertField(addFields.get(5), 0, MinorType.BIGINT, 4L, "d");
- assertEquals(1, removedFields.size());
- assertEquals("c", removedFields.get(0).getAsSchemaPath().getRootSegment().getPath());
- removedFields.clear();
- assertEquals(1, jr.next());
- assertEquals(7, addFields.size()); // The reappearing of field 'c' is also included
- assertField(addFields.get(0), 0, MinorType.BIGINT, 12345L, "test");
- assertField(addFields.get(3), 0, MinorType.BIT, true, "bool");
- assertField(addFields.get(5), 0, MinorType.BIGINT, 6L, "d");
- assertField(addFields.get(2), 0, MinorType.FLOAT4, (float) 5.16, "c");
- assertField(addFields.get(6), 0, MinorType.VARCHAR, "test3", "str2");
- assertEquals(2, removedFields.size());
- Iterables.find(removedFields, new Predicate<MaterializedField>() {
- @Override
- public boolean apply(MaterializedField materializedField) {
- return materializedField.getAsSchemaPath().getRootSegment().getPath().equals("str1");
- }
- });
- Iterables.find(removedFields, new Predicate<MaterializedField>() {
- @Override
- public boolean apply(MaterializedField materializedField) {
- return materializedField.getAsSchemaPath().getRootSegment().getPath().equals("b");
- }
- });
- assertEquals(0, jr.next());
- }
-
- @Test
- @Ignore // until repeated map
- public void testNestedFieldInSameBatch(@Injectable final FragmentContext context) throws ExecutionSetupException, IOException {
-
- JSONRecordReader jr = new JSONRecordReader(context,
- FileUtils.getResourceAsFile("/scan_json_test_3.json").toURI().toString(),
- FileSystem.getLocal(new Configuration()), null);
-
- List<ValueVector> addFields = mutator.getAddFields();
- jr.setup(mutator);
- assertEquals(2, jr.next());
- assertEquals(3, addFields.size());
- assertField(addFields.get(0), 0, MinorType.BIGINT, 123L, "test");
- assertField(addFields.get(1), 0, MinorType.VARCHAR, "test", "a.b");
- assertField(addFields.get(2), 0, MinorType.BIT, true, "a.a.d");
- assertField(addFields.get(0), 1, MinorType.BIGINT, 1234L, "test");
- assertField(addFields.get(1), 1, MinorType.VARCHAR, "test2", "a.b");
- assertField(addFields.get(2), 1, MinorType.BIT, false, "a.a.d");
-
- assertEquals(0, jr.next());
- assertTrue(mutator.getRemovedFields().isEmpty());
- }
-
- @Test
- @Ignore // until repeated map is added.
- public void testRepeatedFields(@Injectable final FragmentContext context) throws ExecutionSetupException, IOException {
-
- JSONRecordReader jr = new JSONRecordReader(context,
- FileUtils.getResourceAsFile("/scan_json_test_4.json").toURI().toString(),
- FileSystem.getLocal(new Configuration()), null);
-
- List<ValueVector> addFields = mutator.getAddFields();
- jr.setup(mutator);
- assertEquals(2, jr.next());
- assertEquals(7, addFields.size());
- assertField(addFields.get(0), 0, MinorType.BIGINT, 123L, "test");
- assertField(addFields.get(1), 0, MinorType.BIGINT, Arrays.asList(1L, 2L, 3L), "test2");
- assertField(addFields.get(2), 0, MinorType.BIGINT, Arrays.asList(4L, 5L, 6L), "test3.a");
- assertField(addFields.get(3), 0, MinorType.BIGINT, Arrays.asList(7L, 8L, 9L), "test3.b");
- assertField(addFields.get(4), 0, MinorType.BIGINT, Arrays.asList(10L, 11L, 12L), "test3.c.d");
- assertField(addFields.get(5), 0, MinorType.FLOAT4, Arrays.<Float>asList((float) 1.1, (float) 1.2, (float) 1.3), "testFloat");
- assertField(addFields.get(6), 0, MinorType.VARCHAR, Arrays.asList("hello", "drill"), "testStr");
- assertField(addFields.get(1), 1, MinorType.BIGINT, Arrays.asList(1L, 2L), "test2");
- assertField(addFields.get(2), 1, MinorType.BIGINT, Arrays.asList(7L, 7L, 7L, 8L), "test3.a");
- assertField(addFields.get(5), 1, MinorType.FLOAT4, Arrays.<Float>asList((float) 2.2, (float) 2.3,(float) 2.4), "testFloat");
-
- assertEquals(0, jr.next());
- assertTrue(mutator.getRemovedFields().isEmpty());
- }
-
- @Test
- public void testRepeatedMissingFields(@Injectable final FragmentContext context) throws ExecutionSetupException, IOException {
-
- JSONRecordReader jr = new JSONRecordReader(context,
- FileUtils.getResourceAsFile("/scan_json_test_5.json").toURI().toString(),
- FileSystem.getLocal(new Configuration()), null);
-
- List<ValueVector> addFields = mutator.getAddFields();
- jr.setup(mutator);
- assertEquals(9, jr.next());
- assertEquals(1, addFields.size());
- assertField(addFields.get(0), 0, MinorType.BIGINT, Arrays.<Long>asList(), "test");
- assertField(addFields.get(0), 1, MinorType.BIGINT, Arrays.asList(1L, 2L, 3L), "test");
- assertField(addFields.get(0), 2, MinorType.BIGINT, Arrays.<Long>asList(), "test");
- assertField(addFields.get(0), 3, MinorType.BIGINT, Arrays.<Long>asList(), "test");
- assertField(addFields.get(0), 4, MinorType.BIGINT, Arrays.asList(4L, 5L, 6L), "test");
- assertField(addFields.get(0), 5, MinorType.BIGINT, Arrays.<Long>asList(), "test");
- assertField(addFields.get(0), 6, MinorType.BIGINT, Arrays.<Long>asList(), "test");
- assertField(addFields.get(0), 7, MinorType.BIGINT, Arrays.asList(7L, 8L, 9L), "test");
- assertField(addFields.get(0), 8, MinorType.BIGINT, Arrays.<Long>asList(), "test");
-
-
- assertEquals(0, jr.next());
- assertTrue(mutator.getRemovedFields().isEmpty());
- }
-
- @Test
- public void testJsonArrayandNormalFields(@Injectable final FragmentContext context) throws ExecutionSetupException, IOException {
-
- JSONRecordReader jr = new JSONRecordReader(context,
- FileUtils.getResourceAsFile("/scan_json_test_7.json").toURI().toString(),
- FileSystem.getLocal(new Configuration()), null);
-
- List<ValueVector> addFields = mutator.getAddFields();
- jr.setup(mutator);
- assertEquals(2, jr.next());
- assertEquals(3, addFields.size());
-
- assertField(addFields.get(0), 0, MinorType.VARCHAR, "ABC", "test");
- assertField(addFields.get(2), 0, MinorType.VARCHAR, "drill", "a");
- assertField(addFields.get(0), 1, MinorType.VARCHAR, "abc", "test");
- assertField(addFields.get(2), 1, MinorType.VARCHAR, "apache", "a");
-
-
- assertEquals(0, jr.next());
- assertTrue(mutator.getRemovedFields().isEmpty());
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/JsonRecordReader2Test.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/JsonRecordReader2Test.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/JsonRecordReader2Test.java
new file mode 100644
index 0000000..1372219
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/json/JsonRecordReader2Test.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.json;
+
+import org.apache.drill.BaseTestQuery;
+import org.junit.Test;
+
+public class JsonRecordReader2Test extends BaseTestQuery{
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JsonRecordReader2Test.class);
+
+ @Test
+ public void testComplexJsonInput() throws Exception{
+// test("select z[0]['orange'] from cp.`jsoninput/input2.json` limit 10");
+ test("select `integer`, x['y'] as x1, x['y'] as x2, z[0], z[0]['orange'], z[1]['pink'] from cp.`jsoninput/input2.json` limit 10");
+// test("select z[0] from cp.`jsoninput/input2.json` limit 10");
+ }
+
+ @Test
+ public void z() throws Exception{
+ test("select * from cp.`join/merge_join.json`");
+ }
+
+ @Test
+ public void y() throws Exception{
+ test("select * from cp.`limit/test1.json` limit 10");
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
new file mode 100644
index 0000000..7bc4a40
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.writer;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.ByteArrayOutputStream;
+import java.util.List;
+
+import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.memory.TopLevelAllocator;
+import org.apache.drill.exec.vector.complex.MapVector;
+import org.apache.drill.exec.vector.complex.fn.JsonReader;
+import org.apache.drill.exec.vector.complex.fn.JsonWriter;
+import org.apache.drill.exec.vector.complex.fn.ReaderJSONRecordSplitter;
+import org.apache.drill.exec.vector.complex.impl.ComplexWriterImpl;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.google.common.base.Charsets;
+import com.google.hive12.common.collect.Lists;
+
+public class TestJsonReader {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestJsonReader.class);
+
+ private static BufferAllocator allocator;
+
+ @BeforeClass
+ public static void setupAllocator(){
+ allocator = new TopLevelAllocator();
+ }
+
+ @AfterClass
+ public static void destroyAllocator(){
+ allocator.close();
+ }
+
+ @Test
+ public void testReader() throws Exception{
+ final int repeatSize = 10;
+
+ String simple = " { \"b\": \"hello\", \"c\": \"goodbye\"}\n " +
+ "{ \"b\": \"yellow\", \"c\": \"red\", \"p\":" +
+ "{ \"integer\" : 2001, \n" +
+ " \"float\" : 1.2,\n" +
+ " \"x\": {\n" +
+ " \"y\": \"friends\",\n" +
+ " \"z\": \"enemies\"\n" +
+ " },\n" +
+ " \"z\": [\n" +
+ " {\"orange\" : \"black\" },\n" +
+ " {\"pink\" : \"purple\" }\n" +
+ " ]\n" +
+ " \n" +
+ "}\n }";
+
+ String compound = simple;
+ for(int i =0; i < repeatSize; i++) compound += simple;
+
+// simple = "{ \"integer\" : 2001, \n" +
+// " \"float\" : 1.2\n" +
+// "}\n" +
+// "{ \"integer\" : -2002,\n" +
+// " \"float\" : -1.2 \n" +
+// "}";
+ MapVector v = new MapVector("", allocator);
+ ComplexWriterImpl writer = new ComplexWriterImpl("col", v);
+ writer.allocate();
+
+
+ JsonReader jsonReader = new JsonReader(new ReaderJSONRecordSplitter(compound));
+ int i =0;
+ List<Integer> batchSizes = Lists.newArrayList();
+
+ outside: while(true){
+ writer.setPosition(i);
+ switch(jsonReader.write(writer)){
+ case WRITE_SUCCEED:
+ i++;
+ break;
+ case NO_MORE:
+ batchSizes.add(i);
+ System.out.println("no more records - main loop");
+ break outside;
+
+ case WRITE_FAILED:
+ System.out.println("==== hit bounds at " + i);
+ //writer.setValueCounts(i - 1);
+ batchSizes.add(i);
+ i = 0;
+ writer.allocate();
+ writer.reset();
+
+ switch(jsonReader.write(writer)){
+ case NO_MORE:
+ System.out.println("no more records - new alloc loop.");
+ break outside;
+ case WRITE_FAILED:
+ throw new RuntimeException("Failure while trying to write.");
+ case WRITE_SUCCEED:
+ i++;
+ };
+
+ };
+ }
+
+ int total = 0;
+ int lastRecordCount = 0;
+ for(Integer records : batchSizes){
+ total += records;
+ lastRecordCount = records;
+ }
+
+
+ ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
+
+ ow.writeValueAsString(v.getAccessor().getObject(0));
+ ow.writeValueAsString(v.getAccessor().getObject(1));
+ FieldReader reader = v.get("col", MapVector.class).getAccessor().getReader();
+
+ ByteArrayOutputStream stream = new ByteArrayOutputStream();
+ JsonWriter jsonWriter = new JsonWriter(stream, true);
+
+ reader.setPosition(0);
+ jsonWriter.write(reader);
+ reader.setPosition(1);
+ jsonWriter.write(reader);
+ System.out.print("Json Read: ");
+ System.out.println(new String(stream.toByteArray(), Charsets.UTF_8));
+// System.out.println(compound);
+
+ System.out.println("Total Records Written " + batchSizes);
+
+ reader.setPosition(lastRecordCount - 2);
+ assertEquals("goodbye", reader.reader("c").readText().toString());
+ reader.setPosition(lastRecordCount - 1);
+ assertEquals("red", reader.reader("c").readText().toString());
+ assertEquals((repeatSize+1) * 2, total);
+
+ writer.clear();
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestRepeated.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestRepeated.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestRepeated.java
new file mode 100644
index 0000000..9c2e3b7
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestRepeated.java
@@ -0,0 +1,264 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.writer;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+
+import org.apache.drill.exec.expr.holders.BigIntHolder;
+import org.apache.drill.exec.expr.holders.IntHolder;
+import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.memory.TopLevelAllocator;
+import org.apache.drill.exec.vector.complex.MapVector;
+import org.apache.drill.exec.vector.complex.fn.JsonWriter;
+import org.apache.drill.exec.vector.complex.impl.ComplexWriterImpl;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.google.common.base.Charsets;
+
+public class TestRepeated {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestRepeated.class);
+
+ private static BufferAllocator allocator;
+
+ @BeforeClass
+ public static void setupAllocator(){
+ allocator = new TopLevelAllocator();
+ }
+
+ @AfterClass
+ public static void destroyAllocator(){
+ allocator.close();
+ }
+//
+// @Test
+// public void repeatedMap(){
+//
+// /**
+// * We're going to try to create an object that looks like:
+// *
+// * {
+// * a: [
+// * {x: 1, y: 2}
+// * {x: 2, y: 1}
+// * ]
+// * }
+// *
+// */
+// MapVector v = new MapVector("", allocator);
+// ComplexWriter writer = new ComplexWriterImpl("col", v);
+//
+// MapWriter map = writer.rootAsMap();
+//
+// map.start();
+// ListWriter list = map.list("a");
+// MapWriter inner = list.map();
+//
+// IntHolder holder = new IntHolder();
+// IntWriter xCol = inner.integer("x");
+// IntWriter yCol = inner.integer("y");
+//
+// inner.start();
+//
+// holder.value = 1;
+// xCol.write(holder);
+// holder.value = 2;
+// yCol.write(holder);
+//
+// inner.end();
+//
+// inner.start();
+//
+// holder.value = 2;
+// xCol.write(holder);
+// holder.value = 1;
+// yCol.write(holder);
+//
+// inner.end();
+//
+// IntWriter numCol = map.integer("nums");
+// holder.value = 14;
+// numCol.write(holder);
+//
+// map.end();
+//
+//
+// assert writer.ok();
+//
+// System.out.println(v.getAccessor().getObject(0));
+//
+// }
+
+ @Test
+ public void listOfList() throws IOException{
+ /**
+ * We're going to try to create an object that looks like:
+ *
+ * {
+ * a: [
+ * [1,2,3],
+ * [2,3,4]
+ * ],
+ * nums: 14,
+ * b: [
+ * { c: 1 },
+ * { c: 2 , x: 15}
+ * ]
+ * }
+ *
+ */
+
+ MapVector v = new MapVector("", allocator);
+ ComplexWriterImpl writer = new ComplexWriterImpl("col", v);
+ writer.allocate();
+
+ {
+ MapWriter map = writer.rootAsMap();
+ ListWriter list = map.list("a");
+ list.start();
+
+ ListWriter innerList = list.list();
+ IntWriter innerInt = innerList.integer();
+
+ innerList.start();
+
+ IntHolder holder = new IntHolder();
+
+ holder.value = 1;
+ innerInt.write(holder);
+ holder.value = 2;
+ innerInt.write(holder);
+ holder.value = 3;
+ innerInt.write(holder);
+
+ innerList.end();
+ innerList.start();
+
+ holder.value = 4;
+ innerInt.write(holder);
+ holder.value = 5;
+ innerInt.write(holder);
+
+ innerList.end();
+ list.end();
+
+ IntWriter numCol = map.integer("nums");
+ holder.value = 14;
+ numCol.write(holder);
+
+ MapWriter repeatedMap = map.list("b").map();
+ repeatedMap.start();
+ holder.value = 1;
+ repeatedMap.integer("c").write(holder);
+ repeatedMap.end();
+
+ repeatedMap.start();
+ holder.value = 2;
+ repeatedMap.integer("c").write(holder);
+ BigIntHolder h = new BigIntHolder();
+ h.value = 15;
+ repeatedMap.bigInt("x").write(h);
+ repeatedMap.end();
+
+ map.end();
+ }
+ assert writer.ok();
+
+ {
+ writer.setPosition(1);
+
+ MapWriter map = writer.rootAsMap();
+ ListWriter list = map.list("a");
+ list.start();
+
+ ListWriter innerList = list.list();
+ IntWriter innerInt = innerList.integer();
+
+ innerList.start();
+
+ IntHolder holder = new IntHolder();
+
+ holder.value = -1;
+ innerInt.write(holder);
+ holder.value = -2;
+ innerInt.write(holder);
+ holder.value = -3;
+ innerInt.write(holder);
+
+ innerList.end();
+ innerList.start();
+
+ holder.value = -4;
+ innerInt.write(holder);
+ holder.value = -5;
+ innerInt.write(holder);
+
+ innerList.end();
+ list.end();
+
+ IntWriter numCol = map.integer("nums");
+ holder.value = -28;
+ numCol.write(holder);
+
+ MapWriter repeatedMap = map.list("b").map();
+ repeatedMap.start();
+ holder.value = -1;
+ repeatedMap.integer("c").write(holder);
+ repeatedMap.end();
+
+ repeatedMap.start();
+ holder.value = -2;
+ repeatedMap.integer("c").write(holder);
+ BigIntHolder h = new BigIntHolder();
+ h.value = -30;
+ repeatedMap.bigInt("x").write(h);
+ repeatedMap.end();
+
+ map.end();
+ }
+
+
+ ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
+
+ System.out.println("Map of Object[0]: " + ow.writeValueAsString(v.getAccessor().getObject(0)));
+ System.out.println("Map of Object[1]: " + ow.writeValueAsString(v.getAccessor().getObject(1)));
+
+
+ ByteArrayOutputStream stream = new ByteArrayOutputStream();
+ JsonWriter jsonWriter = new JsonWriter(stream, true);
+ FieldReader reader = v.get("col", MapVector.class).getAccessor().getReader();
+ reader.setPosition(0);
+ jsonWriter.write(reader);
+ reader.setPosition(1);
+ jsonWriter.write(reader);
+ System.out.print("Json Read: ");
+ System.out.println(new String(stream.toByteArray(), Charsets.UTF_8));
+
+ writer.clear();
+
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/resources/jsoninput/input2.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/jsoninput/input2.json b/exec/java-exec/src/test/resources/jsoninput/input2.json
new file mode 100644
index 0000000..4e044c2
--- /dev/null
+++ b/exec/java-exec/src/test/resources/jsoninput/input2.json
@@ -0,0 +1,40 @@
+{ "integer" : 2010,
+ "float" : 17.4,
+ "x": {
+ "y": "kevin",
+ "z": "paul"
+ },
+ "z": [
+ {"orange" : "yellow" , "pink": "red"},
+ {"pink" : "purple" }
+ ],
+ "l": [4,2],
+ "rl": [ [2,1], [4,6] ]
+}
+{ "integer" : -2002,
+ "float" : -1.2
+}
+{ "integer" : 2001,
+ "float" : 1.2,
+ "x": {
+ "y": "bill",
+ "z": "peter"
+ },
+ "z": [
+ {"pink" : "lilac" }
+ ],
+ "l": [4,2],
+ "rl": [ [2,1], [4,6] ]
+}
+{ "integer" : 6005,
+ "float" : 1.2,
+ "x": {
+ "y": "mike",
+ "z": "mary"
+ },
+ "z": [
+ {"orange" : "stucco" }
+ ],
+ "l": [4,2],
+ "rl": [ [2,1], [4,6] ]
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillAccessorList.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillAccessorList.java b/exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillAccessorList.java
index 6394cb2..7594783 100644
--- a/exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillAccessorList.java
+++ b/exec/jdbc/src/main/java/org/apache/drill/jdbc/DrillAccessorList.java
@@ -30,22 +30,22 @@ public class DrillAccessorList extends BasicList<Accessor>{
private Accessor[] accessors = new Accessor[0];
private int lastColumn = 1;
-
+
public void generateAccessors(DrillCursor cursor, RecordBatchLoader currentBatch){
int cnt = currentBatch.getSchema().getFieldCount();
accessors = new Accessor[cnt];
for(int i =0; i < cnt; i++){
- SqlAccessor acc = TypeHelper.getSqlAccessor(currentBatch.getValueAccessorById(i, null).getValueVector());
+ SqlAccessor acc = TypeHelper.getSqlAccessor(currentBatch.getValueAccessorById(null, i).getValueVector());
accessors[i] = new AvaticaDrillSqlAccessor(acc, cursor);
}
}
-
+
@Override
public Accessor get(int index) {
lastColumn = index;
return accessors[index];
}
-
+
public boolean wasNull() throws SQLException{
return accessors[lastColumn].wasNull();
}
@@ -54,5 +54,5 @@ public class DrillAccessorList extends BasicList<Accessor>{
public int size() {
return accessors.length;
}
-
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestAggregateFunctionsQuery.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestAggregateFunctionsQuery.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestAggregateFunctionsQuery.java
index 10c36cb..4a57ffb 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestAggregateFunctionsQuery.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestAggregateFunctionsQuery.java
@@ -39,6 +39,7 @@ import org.junit.rules.TestRule;
import com.google.common.base.Function;
import com.google.common.base.Stopwatch;
+@Ignore // until stream agg changing schema is fixed.
public class TestAggregateFunctionsQuery {
public static final String WORKING_PATH;
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestFunctionsQuery.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestFunctionsQuery.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestFunctionsQuery.java
index 1212e9f..1c78b5f 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestFunctionsQuery.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestFunctionsQuery.java
@@ -403,6 +403,7 @@ public class TestFunctionsQuery {
}
@Test
+ @Ignore
public void testTimeStampConstant() throws Exception {
String query = String.format("SELECT " +
"timestamp '2008-2-23 12:23:23' as TS " +
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index f903516..d135c56 100644
--- a/pom.xml
+++ b/pom.xml
@@ -501,6 +501,7 @@
<executions>
<execution>
<id>attach-sources</id>
+ <phase>verify</phase>
<goals>
<goal>jar</goal>
</goals>
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/protocol/src/main/java/org/apache/drill/common/types/TypeProtos.java
----------------------------------------------------------------------
diff --git a/protocol/src/main/java/org/apache/drill/common/types/TypeProtos.java b/protocol/src/main/java/org/apache/drill/common/types/TypeProtos.java
index 70125b2..9480ba4 100644
--- a/protocol/src/main/java/org/apache/drill/common/types/TypeProtos.java
+++ b/protocol/src/main/java/org/apache/drill/common/types/TypeProtos.java
@@ -317,6 +317,10 @@ public final class TypeProtos {
* </pre>
*/
INTERVALDAY(35, 39),
+ /**
+ * <code>LIST = 40;</code>
+ */
+ LIST(36, 40),
;
/**
@@ -606,6 +610,10 @@ public final class TypeProtos {
* </pre>
*/
public static final int INTERVALDAY_VALUE = 39;
+ /**
+ * <code>LIST = 40;</code>
+ */
+ public static final int LIST_VALUE = 40;
public final int getNumber() { return value; }
@@ -648,6 +656,7 @@ public final class TypeProtos {
case 37: return NULL;
case 38: return INTERVALYEAR;
case 39: return INTERVALDAY;
+ case 40: return LIST;
default: return null;
}
}
@@ -1779,7 +1788,7 @@ public final class TypeProtos {
"inor_type\030\001 \001(\0162\021.common.MinorType\022\036\n\004mo" +
"de\030\002 \001(\0162\020.common.DataMode\022\r\n\005width\030\003 \001(" +
"\005\022\021\n\tprecision\030\004 \001(\005\022\r\n\005scale\030\005 \001(\005\022\020\n\010t" +
- "imeZone\030\006 \001(\005*\373\003\n\tMinorType\022\010\n\004LATE\020\000\022\007\n" +
+ "imeZone\030\006 \001(\005*\205\004\n\tMinorType\022\010\n\004LATE\020\000\022\007\n" +
"\003MAP\020\001\022\r\n\tREPEATMAP\020\002\022\013\n\007TINYINT\020\003\022\014\n\010SM" +
"ALLINT\020\004\022\007\n\003INT\020\005\022\n\n\006BIGINT\020\006\022\014\n\010DECIMAL" +
"9\020\007\022\r\n\tDECIMAL18\020\010\022\023\n\017DECIMAL28SPARSE\020\t\022" +
@@ -1792,9 +1801,9 @@ public final class TypeProtos {
"NT1\020\035\022\t\n\005UINT2\020\036\022\t\n\005UINT4\020\037\022\t\n\005UINT8\020 \022\022" +
"\n\016DECIMAL28DENSE\020!\022\022\n\016DECIMAL38DENSE\020\"\022\010" +
"\n\004NULL\020%\022\020\n\014INTERVALYEAR\020&\022\017\n\013INTERVALDA" +
- "Y\020\'*4\n\010DataMode\022\014\n\010OPTIONAL\020\000\022\014\n\010REQUIRE" +
- "D\020\001\022\014\n\010REPEATED\020\002B-\n\035org.apache.drill.co" +
- "mmon.typesB\nTypeProtosH\001"
+ "Y\020\'\022\010\n\004LIST\020(*4\n\010DataMode\022\014\n\010OPTIONAL\020\000\022" +
+ "\014\n\010REQUIRED\020\001\022\014\n\010REPEATED\020\002B-\n\035org.apach" +
+ "e.drill.common.typesB\nTypeProtosH\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
[08/10] Add support for RepeatedMapVector,
MapVector and RepeatedListVector.
Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
index 5f26054..039445b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggTemplate.java
@@ -62,9 +62,9 @@ public abstract class HashAggTemplate implements HashAggregator {
private static final long ALLOCATOR_INITIAL_RESERVATION = 1*1024*1024;
private static final long ALLOCATOR_MAX_RESERVATION = 20L*1000*1000*1000;
-
+
private static final boolean EXTRA_DEBUG_1 = false;
- private static final boolean EXTRA_DEBUG_2 = false;
+ private static final boolean EXTRA_DEBUG_2 = false;
private static final String TOO_BIG_ERROR = "Couldn't add value to an empty batch. This likely means that a single value is too long for a varlen field.";
private boolean first = true;
private boolean newSchema = false;
@@ -88,7 +88,7 @@ public abstract class HashAggTemplate implements HashAggregator {
List<VectorAllocator> wsAllocators = Lists.newArrayList(); // allocators for the workspace vectors
ErrorCollector collector = new ErrorCollectorImpl();
-
+
private MaterializedField[] materializedValueFields;
private boolean allFlushed = false;
@@ -102,13 +102,13 @@ public abstract class HashAggTemplate implements HashAggregator {
aggrValuesContainer = new VectorContainer();
ValueVector vector ;
-
- for(int i = 0; i < materializedValueFields.length; i++) {
+
+ for(int i = 0; i < materializedValueFields.length; i++) {
MaterializedField outputField = materializedValueFields[i];
// Create a type-specific ValueVector for this value
vector = TypeHelper.getNewVector(outputField, allocator) ;
VectorAllocator.getAllocator(vector, 50 /* avg. width */).alloc(HashTable.BATCH_SIZE) ;
-
+
aggrValuesContainer.add(vector) ;
}
@@ -124,8 +124,8 @@ public abstract class HashAggTemplate implements HashAggregator {
setupInterior(incoming, outgoing, aggrValuesContainer);
}
- private boolean outputValues() {
- for (int i = 0; i <= maxOccupiedIdx; i++) {
+ private boolean outputValues() {
+ for (int i = 0; i <= maxOccupiedIdx; i++) {
if (outputRecordValues(i, outputCount) ) {
if (EXTRA_DEBUG_2) logger.debug("Outputting values to {}", outputCount) ;
outputCount++;
@@ -139,7 +139,7 @@ public abstract class HashAggTemplate implements HashAggregator {
private void clear() {
aggrValuesContainer.clear();
}
-
+
// Code-generated methods (implemented in HashAggBatch)
@RuntimeOverridden
@@ -155,19 +155,19 @@ public abstract class HashAggTemplate implements HashAggregator {
@Override
public void setup(HashAggregate hashAggrConfig, FragmentContext context, BufferAllocator allocator, RecordBatch incoming, RecordBatch outgoing,
- LogicalExpression[] valueExprs,
+ LogicalExpression[] valueExprs,
List<TypedFieldId> valueFieldIds,
TypedFieldId[] groupByOutFieldIds,
- VectorAllocator[] keyAllocators, VectorAllocator[] valueAllocators)
+ VectorAllocator[] keyAllocators, VectorAllocator[] valueAllocators)
throws SchemaChangeException, ClassTransformationException, IOException {
-
+
if (valueExprs == null || valueFieldIds == null) {
throw new IllegalArgumentException("Invalid aggr value exprs or workspace variables.");
}
if (valueFieldIds.size() < valueExprs.length) {
throw new IllegalArgumentException("Wrong number of workspace variables.");
}
-
+
this.context = context;
this.allocator = allocator;
this.incoming = incoming;
@@ -175,11 +175,11 @@ public abstract class HashAggTemplate implements HashAggregator {
this.keyAllocators = keyAllocators;
this.valueAllocators = valueAllocators;
this.outgoing = outgoing;
-
+
this.hashAggrConfig = hashAggrConfig;
- // currently, hash aggregation is only applicable if there are group-by expressions.
- // For non-grouped (a.k.a Plain) aggregations that don't involve DISTINCT, there is no
+ // currently, hash aggregation is only applicable if there are group-by expressions.
+ // For non-grouped (a.k.a Plain) aggregations that don't involve DISTINCT, there is no
// need to create hash table. However, for plain aggregations with DISTINCT ..
// e.g SELECT COUNT(DISTINCT a1) FROM t1 ;
// we need to build a hash table on the aggregation column a1.
@@ -188,14 +188,14 @@ public abstract class HashAggTemplate implements HashAggregator {
throw new IllegalArgumentException("Currently, hash aggregation is only applicable if there are group-by expressions.");
}
- this.htIdxHolder = new IntHolder();
+ this.htIdxHolder = new IntHolder();
materializedValueFields = new MaterializedField[valueFieldIds.size()];
if (valueFieldIds.size() > 0) {
int i = 0;
- FieldReference ref = new FieldReference("dummy", ExpressionPosition.UNKNOWN, valueFieldIds.get(0).getType());
+ FieldReference ref = new FieldReference("dummy", ExpressionPosition.UNKNOWN, valueFieldIds.get(0).getIntermediateType());
for (TypedFieldId id : valueFieldIds) {
- materializedValueFields[i++] = MaterializedField.create(ref, id.getType());
+ materializedValueFields[i++] = MaterializedField.create(ref, id.getIntermediateType());
}
}
@@ -203,7 +203,7 @@ public abstract class HashAggTemplate implements HashAggregator {
this.htable = ht.createAndSetupHashTable(groupByOutFieldIds) ;
batchHolders = new ArrayList<BatchHolder>();
- addBatchHolder();
+ addBatchHolder();
doSetup(incoming);
}
@@ -211,21 +211,21 @@ public abstract class HashAggTemplate implements HashAggregator {
@Override
public AggOutcome doWork() {
try{
- // Note: Keeping the outer and inner try blocks here to maintain some similarity with
- // StreamingAggregate which does somethings conditionally in the outer try block.
+ // Note: Keeping the outer and inner try blocks here to maintain some similarity with
+ // StreamingAggregate which does somethings conditionally in the outer try block.
// In the future HashAggregate may also need to perform some actions conditionally
- // in the outer try block.
+ // in the outer try block.
outside: while(true) {
// loop through existing records, aggregating the values as necessary.
if (EXTRA_DEBUG_1) logger.debug ("Starting outer loop of doWork()...");
for (; underlyingIndex < incoming.getRecordCount(); incIndex()) {
if(EXTRA_DEBUG_2) logger.debug("Doing loop with values underlying {}, current {}", underlyingIndex, currentIndex);
- checkGroupAndAggrValues(currentIndex);
+ checkGroupAndAggrValues(currentIndex);
}
if (EXTRA_DEBUG_1) logger.debug("Processed {} records", underlyingIndex);
-
+
try{
while(true){
@@ -239,10 +239,10 @@ public abstract class HashAggTemplate implements HashAggregator {
case NOT_YET:
this.outcome = out;
return AggOutcome.RETURN_OUTCOME;
-
+
case OK_NEW_SCHEMA:
if(EXTRA_DEBUG_1) logger.debug("Received new schema. Batch has {} records.", incoming.getRecordCount());
- newSchema = true;
+ newSchema = true;
this.cleanup();
// TODO: new schema case needs to be handled appropriately
return AggOutcome.UPDATE_AGGREGATOR;
@@ -254,20 +254,20 @@ public abstract class HashAggTemplate implements HashAggregator {
} else {
checkGroupAndAggrValues(currentIndex);
incIndex();
-
+
if(EXTRA_DEBUG_1) logger.debug("Continuing outside loop");
continue outside;
}
case NONE:
outcome = out;
- outputKeysAndValues() ;
-
+ outputKeysAndValues() ;
+
// cleanup my internal state since there is nothing more to return
this.cleanup();
// cleanup incoming batch since output of aggregation does not need
// any references to the incoming
-
+
incoming.cleanup();
return setOkAndReturn();
@@ -294,7 +294,7 @@ public abstract class HashAggTemplate implements HashAggregator {
// now otherwise downstream operators will break.
// TODO: allow outputting arbitrarily large number of records in batches
assert (numGroupedRecords < Character.MAX_VALUE);
-
+
for (VectorAllocator a : keyAllocators) {
if(EXTRA_DEBUG_2) logger.debug("Outgoing batch: Allocating {} with {} records.", a, numGroupedRecords);
a.alloc(numGroupedRecords);
@@ -320,14 +320,14 @@ public abstract class HashAggTemplate implements HashAggregator {
public void cleanup(){
htable.clear();
htable = null;
- htIdxHolder = null;
+ htIdxHolder = null;
materializedValueFields = null;
for (BatchHolder bh : batchHolders) {
bh.clear();
}
batchHolders.clear();
- batchHolders = null;
+ batchHolders = null;
}
private AggOutcome tooBigFailure(){
@@ -335,7 +335,7 @@ public abstract class HashAggTemplate implements HashAggregator {
this.outcome = IterOutcome.STOP;
return AggOutcome.CLEANUP_AND_RETURN;
}
-
+
private final AggOutcome setOkAndReturn(){
if(first){
this.outcome = IterOutcome.OK_NEW_SCHEMA;
@@ -356,20 +356,20 @@ public abstract class HashAggTemplate implements HashAggregator {
}
currentIndex = getVectorIndex(underlyingIndex);
}
-
+
private final void resetIndex(){
underlyingIndex = -1;
incIndex();
}
private void addBatchHolder() {
- BatchHolder bh = new BatchHolder();
+ BatchHolder bh = new BatchHolder();
batchHolders.add(bh);
if (EXTRA_DEBUG_1) logger.debug("HashAggregate: Added new batch; num batches = {}.", batchHolders.size());
int batchIdx = batchHolders.size() - 1;
- bh.setup(batchIdx);
+ bh.setup(batchIdx);
}
private boolean outputKeysAndValues() {
@@ -392,20 +392,20 @@ public abstract class HashAggTemplate implements HashAggregator {
return allFlushed;
}
- // Check if a group is present in the hash table; if not, insert it in the hash table.
- // The htIdxHolder contains the index of the group in the hash table container; this same
- // index is also used for the aggregation values maintained by the hash aggregate.
+ // Check if a group is present in the hash table; if not, insert it in the hash table.
+ // The htIdxHolder contains the index of the group in the hash table container; this same
+ // index is also used for the aggregation values maintained by the hash aggregate.
private boolean checkGroupAndAggrValues(int incomingRowIdx) {
if (incomingRowIdx < 0) {
throw new IllegalArgumentException("Invalid incoming row index.");
}
- /** for debugging
+ /** for debugging
Object tmp = (incoming).getValueAccessorById(0, BigIntVector.class).getValueVector();
BigIntVector vv0 = null;
BigIntHolder holder = null;
- if (tmp != null) {
+ if (tmp != null) {
vv0 = ((BigIntVector) tmp);
holder = new BigIntHolder();
holder.value = vv0.getAccessor().get(incomingRowIdx) ;
@@ -432,7 +432,7 @@ public abstract class HashAggTemplate implements HashAggregator {
// logger.debug("group-by key = {} already present at hash table index = {}", holder.value, currentIdx) ;
//}
- }
+ }
else if (putStatus == HashTable.PutStatus.KEY_ADDED) {
if (EXTRA_DEBUG_2) logger.debug("Group-by key was added to hash table, inserting new aggregate values") ;
@@ -441,17 +441,17 @@ public abstract class HashAggTemplate implements HashAggregator {
// logger.debug("group-by key = {} added at hash table index = {}", holder.value, currentIdx) ;
//}
}
-
+
if (bh.updateAggrValues(incomingRowIdx, idxWithinBatch)) {
numGroupedRecords++;
return true;
}
-
- }
+
+ }
return false;
}
-
+
// Code-generated methods (implemented in HashAggBatch)
public abstract void doSetup(@Named("incoming") RecordBatch incoming);
public abstract int getVectorIndex(@Named("recordIndex") int recordIndex);
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/InternalBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/InternalBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/InternalBatch.java
index 34845b3..3e6def1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/InternalBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/InternalBatch.java
@@ -28,12 +28,12 @@ import org.apache.drill.exec.record.selection.SelectionVector4;
public class InternalBatch implements Iterable<VectorWrapper<?>>{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(InternalBatch.class);
-
+
private final VectorContainer container;
private final BatchSchema schema;
private final SelectionVector2 sv2;
private final SelectionVector4 sv4;
-
+
public InternalBatch(RecordBatch incoming){
switch(incoming.getSchema().getSelectionVectorMode()){
case FOUR_BYTE:
@@ -42,7 +42,7 @@ public class InternalBatch implements Iterable<VectorWrapper<?>>{
break;
case TWO_BYTE:
this.sv4 = null;
- this.sv2 = incoming.getSelectionVector2().clone();
+ this.sv2 = incoming.getSelectionVector2().clone();
break;
default:
this.sv4 = null;
@@ -74,9 +74,9 @@ public class InternalBatch implements Iterable<VectorWrapper<?>>{
if(sv4 != null) sv4.clear();
container.clear();
}
-
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz){
- return container.getValueAccessorById(fieldId, clazz);
+
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int[] fieldIds){
+ return container.getValueAccessorById(clazz, fieldIds);
}
-
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
index 883052a..72d0462 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
@@ -348,8 +348,8 @@ public class HashJoinBatch extends AbstractRecordBatch<HashJoinPOP> {
container.add(v);
allocators.add(RemovingRecordBatch.getAllocator4(v));
- JVar inVV = g.declareVectorValueSetupAndMember("buildBatch", new TypedFieldId(vv.getField().getType(), fieldId, true));
- JVar outVV = g.declareVectorValueSetupAndMember("outgoing", new TypedFieldId(vv.getField().getType(), fieldId, false));
+ JVar inVV = g.declareVectorValueSetupAndMember("buildBatch", new TypedFieldId(vv.getField().getType(), true, fieldId));
+ JVar outVV = g.declareVectorValueSetupAndMember("outgoing", new TypedFieldId(vv.getField().getType(), false, fieldId));
g.getEvalBlock()._if(outVV.invoke("copyFromSafe")
.arg(buildIndex.band(JExpr.lit((int) Character.MAX_VALUE)))
.arg(outIndex)
@@ -376,8 +376,8 @@ public class HashJoinBatch extends AbstractRecordBatch<HashJoinPOP> {
container.add(v);
allocators.add(RemovingRecordBatch.getAllocator4(v));
- JVar inVV = g.declareVectorValueSetupAndMember("probeBatch", new TypedFieldId(vv.getField().getType(), fieldId, false));
- JVar outVV = g.declareVectorValueSetupAndMember("outgoing", new TypedFieldId(vv.getField().getType(), outputFieldId, false));
+ JVar inVV = g.declareVectorValueSetupAndMember("probeBatch", new TypedFieldId(vv.getField().getType(), false, fieldId));
+ JVar outVV = g.declareVectorValueSetupAndMember("outgoing", new TypedFieldId(vv.getField().getType(), false, outputFieldId));
g.getEvalBlock()._if(outVV.invoke("copyFromSafe").arg(probeIndex).arg(outIndex).arg(inVV).not())._then()._return(JExpr.FALSE);
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinStatus.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinStatus.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinStatus.java
index baa232e..c07878a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinStatus.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinStatus.java
@@ -43,7 +43,7 @@ public final class JoinStatus {
private int rightPosition;
private int svRightPosition;
private IterOutcome lastRight;
-
+
private int outputPosition;
public RightSourceMode rightSourceMode = RightSourceMode.INCOMING;
public MergeJoinBatch outputBatch;
@@ -54,7 +54,7 @@ public final class JoinStatus {
public boolean ok = true;
private boolean initialSet = false;
private boolean leftRepeating = false;
-
+
public JoinStatus(RecordBatch left, RecordBatch right, MergeJoinBatch output) {
super();
this.left = left;
@@ -70,7 +70,7 @@ public final class JoinStatus {
initialSet = true;
}
}
-
+
public final void advanceLeft(){
leftPosition++;
}
@@ -90,6 +90,10 @@ public final class JoinStatus {
return (rightSourceMode == RightSourceMode.INCOMING) ? rightPosition : svRightPosition;
}
+ public final int getRightCount(){
+ return right.getRecordCount();
+ }
+
public final void setRightPosition(int pos) {
rightPosition = pos;
}
@@ -176,7 +180,7 @@ public final class JoinStatus {
}
if(b.getSchema().getSelectionVectorMode() == SelectionVectorMode.TWO_BYTE) b.getSelectionVector2().clear();
}
-
+
/**
* Check if the left record position can advance by one in the current batch.
*/
@@ -230,5 +234,5 @@ public final class JoinStatus {
private boolean eitherMatches(IterOutcome outcome){
return lastLeft == outcome || lastRight == outcome;
}
-
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java
index f43934e..af0d378 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/JoinTemplate.java
@@ -97,8 +97,8 @@ public abstract class JoinTemplate implements JoinWorker {
while (status.isLeftPositionAllowed()) {
if (!doCopyLeft(status.getLeftPosition(), status.getOutPosition()))
return false;
-
- status.incOutputPos();
+
+ status.incOutputPos();
status.advanceLeft();
}
}
@@ -113,7 +113,7 @@ public abstract class JoinTemplate implements JoinWorker {
case -1:
// left key < right key
if (((MergeJoinPOP)status.outputBatch.getPopConfig()).getJoinType() == JoinRelType.LEFT) {
- if (!doCopyLeft(status.getLeftPosition(), status.getOutPosition()))
+ if (!doCopyLeft(status.getLeftPosition(), status.getOutPosition()))
return false;
status.incOutputPos();
}
@@ -135,7 +135,7 @@ public abstract class JoinTemplate implements JoinWorker {
doCompareNextLeftKey(status.getLeftPosition()) != 0)
// this record marks the end of repeated keys
status.notifyLeftStoppedRepeating();
-
+
boolean crossedBatchBoundaries = false;
int initialRightPosition = status.getRightPosition();
do {
@@ -143,11 +143,11 @@ public abstract class JoinTemplate implements JoinWorker {
if (!doCopyLeft(status.getLeftPosition(), status.getOutPosition()))
return false;
- if (!doCopyRight(status.getRightPosition(), status.getOutPosition()))
+ if (!doCopyRight(status.getRightPosition(), status.getOutPosition()))
return false;
-
+
status.incOutputPos();
-
+
// If the left key has duplicates and we're about to cross a boundary in the right batch, add the
// right table's record batch to the sv4 builder before calling next. These records will need to be
// copied again for each duplicate left key.
@@ -170,7 +170,7 @@ public abstract class JoinTemplate implements JoinWorker {
status.notifyLeftStoppedRepeating();
} else if (status.isLeftRepeating() && crossedBatchBoundaries) {
try {
- // build the right batches and
+ // build the right batches and
status.outputBatch.batchBuilder.build();
status.setSV4AdvanceMode();
} catch (SchemaChangeException e) {
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
index a2c424f..3d496d3 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java
@@ -350,10 +350,10 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
public TypedFieldId getValueVectorId(SchemaPath path) {
return outgoingContainer.getValueVectorId(path);
}
-
+
@Override
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz) {
- return outgoingContainer.getValueAccessorById(fieldId, clazz);
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids) {
+ return outgoingContainer.getValueAccessorById(clazz, ids);
}
@Override
@@ -373,7 +373,7 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
/**
* Creates a generate class which implements the copy and compare methods.
- *
+ *
* @return instance of a new merger based on generated code
* @throws SchemaChangeException
*/
@@ -443,8 +443,8 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
// declare incoming value vector and assign it to the array
JVar inVV = cg.declareVectorValueSetupAndMember("incomingBatches[" + batchIdx + "]",
new TypedFieldId(vv.getField().getType(),
- fieldIdx,
- false));
+ false,
+ fieldIdx));
// add vv to initialization list (e.g. { vv1, vv2, vv3 } )
incomingVectorInitBatch.add(inVV);
@@ -501,11 +501,12 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
TypeProtos.MinorType minor = vvRead.getMajorType().getMinorType();
Class cmpVectorClass = TypeHelper.getValueVectorClass(minor, mode);
+ JExpression arr = JExpr.newArray(cg.getModel().INT).add(JExpr.lit(vvRead.getFieldId().getFieldIds()[0]));
comparisonVectorInitBatch.add(
((JExpression) incomingBatchesVar.component(JExpr.lit(b)))
.invoke("getValueAccessorById")
- .arg(JExpr.lit(vvRead.getFieldId().getFieldId()))
.arg(cg.getModel()._ref(cmpVectorClass).boxify().dotclass())
+ .arg(arr)
.invoke("getValueVector"));
}
@@ -583,8 +584,7 @@ public class MergingRecordBatch extends AbstractRecordBatch<MergingReceiverPOP>
// declare outgoing value vectors
JVar outgoingVV = cg.declareVectorValueSetupAndMember("outgoingBatch",
new TypedFieldId(vvOut.getField().getType(),
- fieldIdx,
- vvOut.isHyper()));
+ vvOut.isHyper(), fieldIdx));
// assign to the appropriate slot in the outgoingVector array (in order of iteration)
cg.getSetupBlock().assign(outgoingVectors.component(JExpr.lit(fieldIdx)), outgoingVV);
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionProjectorTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionProjectorTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionProjectorTemplate.java
index dd7011a..3398443 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionProjectorTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/orderedpartitioner/OrderedPartitionProjectorTemplate.java
@@ -82,7 +82,7 @@ public abstract class OrderedPartitionProjectorTemplate implements OrderedPartit
this.svMode = incoming.getSchema().getSelectionVectorMode();
this.outBatch = outgoing;
this.outputField = outputField;
- partitionValues = (IntVector) outBatch.getValueAccessorById(outBatch.getValueVectorId(outputField).getFieldId(), IntVector.class).getValueVector();
+ partitionValues = (IntVector) outBatch.getValueAccessorById(IntVector.class, outBatch.getValueVectorId(outputField).getFieldIds()).getValueVector();
switch(svMode){
case FOUR_BYTE:
case TWO_BYTE:
@@ -98,7 +98,7 @@ public abstract class OrderedPartitionProjectorTemplate implements OrderedPartit
public abstract int doEval(@Named("inIndex") int inIndex, @Named("partitionIndex") int partitionIndex);
-
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/OutgoingRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/OutgoingRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/OutgoingRecordBatch.java
index 6e115a7..deef25f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/OutgoingRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/OutgoingRecordBatch.java
@@ -219,8 +219,8 @@ public class OutgoingRecordBatch implements VectorAccessible {
}
@Override
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz) {
- return vectorContainer.getValueAccessorById(fieldId, clazz);
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... fieldIds) {
+ return vectorContainer.getValueAccessorById(clazz, fieldIds);
}
@Override
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java
index 6048085..bcd484c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionSenderRootExec.java
@@ -94,7 +94,7 @@ public class PartitionSenderRootExec implements RootExec {
if (!ok) {
stop();
-
+
return false;
}
@@ -153,7 +153,7 @@ public class PartitionSenderRootExec implements RootExec {
}
-
+
private void generatePartitionFunction() throws SchemaChangeException {
LogicalExpression filterExpression = operator.getExpr();
@@ -166,7 +166,7 @@ public class PartitionSenderRootExec implements RootExec {
}
cg.addExpr(new ReturnValueExpression(expr));
-
+
try {
Partitioner p = context.getImplementationClass(cg);
p.setup(context, incoming, outgoing);
@@ -214,7 +214,7 @@ public class PartitionSenderRootExec implements RootExec {
"outgoingVectors");
// create 2d array and build initialization list. For example:
- // outgoingVectors = new ValueVector[][] {
+ // outgoingVectors = new ValueVector[][] {
// new ValueVector[] {vv1, vv2},
// new ValueVector[] {vv3, vv4}
// });
@@ -229,8 +229,8 @@ public class PartitionSenderRootExec implements RootExec {
// declare outgoing value vector and assign it to the array
JVar outVV = cg.declareVectorValueSetupAndMember("outgoing[" + batchId + "]",
new TypedFieldId(vv.getField().getType(),
- fieldId,
- false));
+ false,
+ fieldId));
// add vv to initialization list (e.g. { vv1, vv2, vv3 } )
outgoingVectorInitBatch.add(outVV);
++fieldId;
@@ -248,8 +248,8 @@ public class PartitionSenderRootExec implements RootExec {
for (VectorWrapper<?> vvIn : incoming) {
// declare incoming value vectors
JVar incomingVV = cg.declareVectorValueSetupAndMember("incoming", new TypedFieldId(vvIn.getField().getType(),
- fieldId,
- vvIn.isHyper()));
+ vvIn.isHyper(),
+ fieldId));
// generate the copyFrom() invocation with explicit cast to the appropriate type
Class<?> vvType = TypeHelper.getValueVectorClass(vvIn.getField().getType().getMinorType(),
@@ -307,7 +307,7 @@ public class PartitionSenderRootExec implements RootExec {
}
}
}
-
+
public void stop() {
logger.debug("Partition sender stopping.");
ok = false;
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
index 347092a..b94f403 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectRecordBatch.java
@@ -20,9 +20,7 @@ package org.apache.drill.exec.physical.impl.project;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
-import java.util.Set;
-import com.sun.codemodel.JExpr;
import org.apache.drill.common.expression.ErrorCollector;
import org.apache.drill.common.expression.ErrorCollectorImpl;
import org.apache.drill.common.expression.FieldReference;
@@ -31,7 +29,6 @@ import org.apache.drill.common.expression.PathSegment;
import org.apache.drill.common.expression.PathSegment.NameSegment;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.logical.data.NamedExpression;
-import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.exec.exception.ClassTransformationException;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.expr.ClassGenerator;
@@ -44,7 +41,6 @@ import org.apache.drill.exec.expr.ValueVectorWriteExpression;
import org.apache.drill.exec.memory.OutOfMemoryException;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.physical.config.Project;
-import org.apache.drill.exec.physical.impl.filter.ReturnValueExpression;
import org.apache.drill.exec.record.AbstractSingleRecordBatch;
import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
import org.apache.drill.exec.record.MaterializedField;
@@ -52,12 +48,13 @@ import org.apache.drill.exec.record.RecordBatch;
import org.apache.drill.exec.record.TransferPair;
import org.apache.drill.exec.record.TypedFieldId;
import org.apache.drill.exec.record.VectorWrapper;
-import org.apache.drill.exec.util.VectorUtil;
import org.apache.drill.exec.vector.AllocationHelper;
import org.apache.drill.exec.vector.ValueVector;
+import com.carrotsearch.hppc.IntOpenHashSet;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
+import com.sun.codemodel.JExpr;
public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ProjectRecordBatch.class);
@@ -92,6 +89,7 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
int incomingRecordCount = incoming.getRecordCount();
for(ValueVector v : this.allocationVectors){
AllocationHelper.allocate(v, incomingRecordCount, 250);
+// v.allocateNew();
}
int outputRecords = projector.projectRecords(0, incomingRecordCount, 0);
if (outputRecords < incomingRecordCount) {
@@ -177,14 +175,15 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
final ClassGenerator<Projector> cg = CodeGenerator.getRoot(Projector.TEMPLATE_DEFINITION, context.getFunctionRegistry());
- Set<Integer> transferFieldIds = new HashSet();
+ IntOpenHashSet transferFieldIds = new IntOpenHashSet();
boolean isAnyWildcard = isAnyWildcard(exprs);
if(isAnyWildcard){
for(VectorWrapper<?> wrapper : incoming){
ValueVector vvIn = wrapper.getValueVector();
- String name = vvIn.getField().getDef().getName(vvIn.getField().getDef().getNameCount() - 1).getName();
+
+ String name = vvIn.getField().getPath().getLastSegment().getNameSegment().getPath();
FieldReference ref = new FieldReference(name);
TransferPair tp = wrapper.getValueVector().getTransferPair(ref);
transfers.add(tp);
@@ -202,17 +201,19 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
// add value vector to transfer if direct reference and this is allowed, otherwise, add to evaluation stack.
if(expr instanceof ValueVectorReadExpression && incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.NONE
+ && !((ValueVectorReadExpression) expr).hasReadPath()
&& !isAnyWildcard
- &&!transferFieldIds.contains(((ValueVectorReadExpression) expr).getFieldId().getFieldId())
- && !((ValueVectorReadExpression) expr).isArrayElement()) {
+ && !transferFieldIds.contains(((ValueVectorReadExpression) expr).getFieldId().getFieldIds()[0])
+ && !((ValueVectorReadExpression) expr).hasReadPath()) {
ValueVectorReadExpression vectorRead = (ValueVectorReadExpression) expr;
- ValueVector vvIn = incoming.getValueAccessorById(vectorRead.getFieldId().getFieldId(), TypeHelper.getValueVectorClass(vectorRead.getMajorType().getMinorType(), vectorRead.getMajorType().getMode())).getValueVector();
+ TypedFieldId id = vectorRead.getFieldId();
+ ValueVector vvIn = incoming.getValueAccessorById(id.getIntermediateClass(), id.getFieldIds()).getValueVector();
Preconditions.checkNotNull(incoming);
TransferPair tp = vvIn.getTransferPair(getRef(namedExpression));
transfers.add(tp);
container.add(tp.getTo());
- transferFieldIds.add(vectorRead.getFieldId().getFieldId());
+ transferFieldIds.add(vectorRead.getFieldId().getFieldIds()[0]);
// logger.debug("Added transfer.");
}else{
// need to do evaluation.
@@ -221,6 +222,8 @@ public class ProjectRecordBatch extends AbstractSingleRecordBatch<Project>{
TypedFieldId fid = container.add(vector);
ValueVectorWriteExpression write = new ValueVectorWriteExpression(fid, expr, true);
HoldingContainer hc = cg.addExpr(write);
+
+
cg.getEvalBlock()._if(hc.getValue().eq(JExpr.lit(0)))._then()._return(JExpr.FALSE);
logger.debug("Added eval.");
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectorTemplate.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectorTemplate.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectorTemplate.java
index 60e5993..aa0ecf6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectorTemplate.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/project/ProjectorTemplate.java
@@ -33,12 +33,12 @@ import com.google.common.collect.ImmutableList;
public abstract class ProjectorTemplate implements Projector {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ProjectorTemplate.class);
-
+
private ImmutableList<TransferPair> transfers;
private SelectionVector2 vector2;
private SelectionVector4 vector4;
private SelectionVectorMode svMode;
-
+
public ProjectorTemplate() throws SchemaChangeException{
}
@@ -47,18 +47,18 @@ public abstract class ProjectorTemplate implements Projector {
switch(svMode){
case FOUR_BYTE:
throw new UnsupportedOperationException();
-
-
+
+
case TWO_BYTE:
final int count = recordCount;
for(int i = 0; i < count; i++, firstOutputIndex++){
doEval(vector2.getIndex(i), firstOutputIndex);
}
return recordCount;
-
-
+
+
case NONE:
-
+
final int countN = recordCount;
int i;
for (i = startIndex; i < startIndex + countN; i++, firstOutputIndex++) {
@@ -76,8 +76,9 @@ public abstract class ProjectorTemplate implements Projector {
t.transfer();
}
return recordCount;
-
-
+
+
+
default:
throw new UnsupportedOperationException();
}
@@ -86,7 +87,7 @@ public abstract class ProjectorTemplate implements Projector {
@Override
public final void setup(FragmentContext context, RecordBatch incoming, RecordBatch outgoing, List<TransferPair> transfers) throws SchemaChangeException{
- this.svMode = incoming.getSchema().getSelectionVectorMode();
+ this.svMode = incoming.getSchema().getSelectionVectorMode();
switch(svMode){
case FOUR_BYTE:
this.vector4 = incoming.getSelectionVector4();
@@ -103,7 +104,7 @@ public abstract class ProjectorTemplate implements Projector {
public abstract boolean doEval(@Named("inIndex") int inIndex, @Named("outIndex") int outIndex);
-
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
index 4018991..62af0b2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/svremover/RemovingRecordBatch.java
@@ -86,7 +86,7 @@ public class RemovingRecordBatch extends AbstractSingleRecordBatch<SelectionVect
default:
throw new UnsupportedOperationException();
}
-
+
container.buildSchema(SelectionVectorMode.NONE);
}
@@ -156,12 +156,12 @@ public class RemovingRecordBatch extends AbstractSingleRecordBatch<SelectionVect
public void cleanup(){
super.cleanup();
}
-
+
private class StraightCopier implements Copier{
private List<TransferPair> pairs = Lists.newArrayList();
private List<ValueVector> out = Lists.newArrayList();
-
+
@Override
public void setupRemover(FragmentContext context, RecordBatch incoming, RecordBatch outgoing, VectorAllocator[] allocators){
for(VectorWrapper<?> vv : incoming){
@@ -183,7 +183,7 @@ public class RemovingRecordBatch extends AbstractSingleRecordBatch<SelectionVect
public List<ValueVector> getOut() {
return out;
}
-
+
}
private Copier getStraightCopier(){
@@ -192,10 +192,10 @@ public class RemovingRecordBatch extends AbstractSingleRecordBatch<SelectionVect
container.addCollection(copier.getOut());
return copier;
}
-
+
private Copier getGenerated2Copier() throws SchemaChangeException{
Preconditions.checkArgument(incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.TWO_BYTE);
-
+
List<VectorAllocator> allocators = Lists.newArrayList();
for(VectorWrapper<?> i : incoming){
ValueVector v = TypeHelper.getNewVector(i.getField(), oContext.getAllocator());
@@ -218,12 +218,12 @@ public class RemovingRecordBatch extends AbstractSingleRecordBatch<SelectionVect
Preconditions.checkArgument(incoming.getSchema().getSelectionVectorMode() == SelectionVectorMode.FOUR_BYTE);
return getGenerated4Copier(incoming, context, oContext.getAllocator(), container, this);
}
-
+
public static Copier getGenerated4Copier(RecordBatch batch, FragmentContext context, BufferAllocator allocator, VectorContainer container, RecordBatch outgoing) throws SchemaChangeException{
List<VectorAllocator> allocators = Lists.newArrayList();
for(VectorWrapper<?> i : batch){
-
+
ValueVector v = TypeHelper.getNewVector(i.getField(), allocator);
container.add(v);
allocators.add(getAllocator4(v));
@@ -239,20 +239,20 @@ public class RemovingRecordBatch extends AbstractSingleRecordBatch<SelectionVect
throw new SchemaChangeException("Failure while attempting to load generated class", e);
}
}
-
+
public static void generateCopies(ClassGenerator g, VectorAccessible batch, boolean hyper){
// we have parallel ids for each value vector so we don't actually have to deal with managing the ids at all.
int fieldId = 0;
-
+
JExpression inIndex = JExpr.direct("inIndex");
JExpression outIndex = JExpr.direct("outIndex");
g.rotateBlock();
for(VectorWrapper<?> vv : batch){
- JVar inVV = g.declareVectorValueSetupAndMember("incoming", new TypedFieldId(vv.getField().getType(), fieldId, vv.isHyper()));
- JVar outVV = g.declareVectorValueSetupAndMember("outgoing", new TypedFieldId(vv.getField().getType(), fieldId, false));
+ JVar inVV = g.declareVectorValueSetupAndMember("incoming", new TypedFieldId(vv.getField().getType(), vv.isHyper(), fieldId));
+ JVar outVV = g.declareVectorValueSetupAndMember("outgoing", new TypedFieldId(vv.getField().getType(), false, fieldId));
if(hyper){
-
+
g.getEvalBlock()._if(
outVV
.invoke("copyFromSafe")
@@ -268,20 +268,20 @@ public class RemovingRecordBatch extends AbstractSingleRecordBatch<SelectionVect
}else{
g.getEvalBlock()._if(outVV.invoke("copyFromSafe").arg(inIndex).arg(outIndex).arg(inVV).not())._then()._return(JExpr.FALSE);
}
-
-
+
+
fieldId++;
}
g.rotateBlock();
g.getEvalBlock()._return(JExpr.TRUE);
}
-
+
@Override
public WritableBatch getWritableBatch() {
return WritableBatch.get(this);
}
-
+
public static VectorAllocator getAllocator4(ValueVector outgoing){
if(outgoing instanceof FixedWidthVector){
return new FixedVectorAllocator((FixedWidthVector) outgoing);
@@ -291,6 +291,6 @@ public class RemovingRecordBatch extends AbstractSingleRecordBatch<SelectionVect
throw new UnsupportedOperationException();
}
}
-
-
+
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java
index 8d3a3e5..f96a1bd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/validate/IteratorValidatorBatchIterator.java
@@ -100,9 +100,9 @@ public class IteratorValidatorBatchIterator implements RecordBatch {
}
@Override
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz) {
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids) {
validateReadState();
- return incoming.getValueAccessorById(fieldId, clazz);
+ return incoming.getValueAccessorById(clazz, ids);
}
@Override
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/BatchGroup.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/BatchGroup.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/BatchGroup.java
index d87a9f5..a546852 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/BatchGroup.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/BatchGroup.java
@@ -242,8 +242,8 @@ public class BatchGroup implements VectorAccessible {
}
@Override
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz) {
- return currentContainer.getValueAccessorById(fieldId, clazz);
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids) {
+ return currentContainer.getValueAccessorById(clazz, ids);
}
@Override
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
index 930f851..4b6c37d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
@@ -373,12 +373,16 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
ValueVector[] vectors = new ValueVector[batchGroupList.size() * 2];
int i = 0;
for (BatchGroup group : batchGroupList) {
- vectors[i++] = group.getValueAccessorById(group.getValueVectorId(field.getAsSchemaPath()).getFieldId(),
- field.getValueClass()).getValueVector();
+ vectors[i++] = group.getValueAccessorById(
+ field.getValueClass(),
+ group.getValueVectorId(field.getPath()).getFieldIds()
+ ).getValueVector();
if (group.hasSecond()) {
VectorContainer c = group.getSecondContainer();
- vectors[i++] = c.getValueAccessorById(c.getValueVectorId(field.getAsSchemaPath()).getFieldId(),
- field.getValueClass()).getValueVector();
+ vectors[i++] = c.getValueAccessorById(
+ field.getValueClass(),
+ c.getValueVectorId(field.getPath()).getFieldIds()
+ ).getValueVector();
} else {
vectors[i] = vectors[i - 1].getTransferPair().getTo(); //this vector should never be used. Just want to avoid having null elements in the hyper vector
i++;
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractRecordBatch.java
index 214f81c..844d6db 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractRecordBatch.java
@@ -31,7 +31,7 @@ import org.apache.drill.exec.record.selection.SelectionVector4;
public abstract class AbstractRecordBatch<T extends PhysicalOperator> implements RecordBatch{
final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(this.getClass());
-
+
protected final VectorContainer container = new VectorContainer();
protected final T popConfig;
protected final FragmentContext context;
@@ -43,7 +43,7 @@ public abstract class AbstractRecordBatch<T extends PhysicalOperator> implements
this.popConfig = popConfig;
this.oContext = new OperatorContext(popConfig, context);
}
-
+
@Override
public Iterator<VectorWrapper<?>> iterator() {
return container.iterator();
@@ -67,14 +67,14 @@ public abstract class AbstractRecordBatch<T extends PhysicalOperator> implements
public void kill() {
killIncoming();
}
-
+
protected abstract void killIncoming();
-
+
public void cleanup(){
container.clear();
oContext.close();
}
-
+
@Override
public SelectionVector2 getSelectionVector2() {
throw new UnsupportedOperationException();
@@ -91,16 +91,16 @@ public abstract class AbstractRecordBatch<T extends PhysicalOperator> implements
}
@Override
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz) {
- return container.getValueAccessorById(fieldId, clazz);
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids) {
+ return container.getValueAccessorById(clazz, ids);
}
-
+
@Override
public WritableBatch getWritableBatch() {
// logger.debug("Getting writable batch.");
WritableBatch batch = WritableBatch.get(this);
return batch;
-
+
}
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
index a6a4621..b44a233 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java
@@ -20,21 +20,25 @@ package org.apache.drill.exec.record;
import java.lang.reflect.Array;
import com.google.common.base.Preconditions;
+
import org.apache.commons.lang3.ArrayUtils;
+import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.complex.AbstractContainerVector;
+import org.apache.drill.exec.vector.complex.MapVector;
public class HyperVectorWrapper<T extends ValueVector> implements VectorWrapper<T>{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(HyperVectorWrapper.class);
-
+
private T[] vectors;
private MaterializedField f;
private final boolean releasable;
-
+
public HyperVectorWrapper(MaterializedField f, T[] v){
this(f, v, true);
}
-
+
public HyperVectorWrapper(MaterializedField f, T[] v, boolean releasable){
assert(v.length > 0);
this.f = f;
@@ -72,9 +76,51 @@ public class HyperVectorWrapper<T extends ValueVector> implements VectorWrapper<
public void clear() {
if(!releasable) return;
for(T x : vectors){
- x.clear();
+ x.clear();
+ }
+ }
+
+ @Override
+ public VectorWrapper<?> getChildWrapper(int[] ids) {
+ if(ids.length == 1) return this;
+
+ ValueVector[] vectors = new ValueVector[this.vectors.length];
+ int index = 0;
+
+ for(ValueVector v : this.vectors){
+ ValueVector vector = v;
+ for(int i = 1; i < ids.length; i++){
+ MapVector map = (MapVector) vector;
+ vector = map.getVectorById(ids[i]);
+ }
+ vectors[index] = vector;
+ index++;
+ }
+ return new HyperVectorWrapper<ValueVector>(vectors[0].getField(), vectors);
+ }
+
+ @Override
+ public TypedFieldId getFieldIdIfMatches(int id, SchemaPath expectedPath) {
+ ValueVector v = vectors[0];
+ if(!expectedPath.getRootSegment().segmentEquals(v.getField().getPath().getRootSegment())) return null;
+
+ if(v instanceof AbstractContainerVector){
+ // we're looking for a multi path.
+ AbstractContainerVector c = (AbstractContainerVector) v;
+ TypedFieldId.Builder builder = TypedFieldId.newBuilder();
+ builder.intermediateType(v.getField().getType());
+ builder.hyper();
+ builder.addId(id);
+ return c.getFieldIdIfMatches(builder, true, expectedPath.getRootSegment().getChild());
+
+ }else{
+ return TypedFieldId.newBuilder() //
+ .intermediateType(v.getField().getType()) //
+ .finalType(v.getField().getType()) //
+ .addId(id) //
+ .hyper() //
+ .build();
}
-
}
@Override
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
index d93e258..439552f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/MaterializedField.java
@@ -17,39 +17,58 @@
*/
package org.apache.drill.exec.record;
-import java.util.Collections;
-import java.util.Iterator;
import java.util.List;
import org.apache.drill.common.expression.FieldReference;
-import org.apache.drill.common.expression.LogicalExpression;
import org.apache.drill.common.expression.PathSegment;
-import org.apache.drill.common.expression.PathSegment.NameSegment;
import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.logical.data.NamedExpression;
-import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.exec.expr.TypeHelper;
-import org.apache.drill.exec.proto.SchemaDefProtos.FieldDef;
-import org.apache.drill.exec.proto.SchemaDefProtos.NamePart;
-import org.apache.drill.exec.proto.SchemaDefProtos.NamePart.Type;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
-import com.beust.jcommander.internal.Lists;
+import com.google.hive12.common.collect.Lists;
public class MaterializedField{
- private final FieldDef def;
+ private SchemaPath path;
+ private MajorType type;
+ private List<MaterializedField> children = Lists.newArrayList();
- public MaterializedField(FieldDef def) {
- this.def = def;
+ private MaterializedField(SchemaPath path, MajorType type) {
+ super();
+ this.path = path;
+ this.type = type;
}
- public static MaterializedField create(FieldDef def){
- return new MaterializedField(def);
+ public static MaterializedField create(SerializedField serField){
+ return new MaterializedField(SchemaPath.create(serField.getNamePart()), serField.getMajorType());
+ }
+
+ public SerializedField.Builder getAsBuilder(){
+ return SerializedField.newBuilder() //
+ .setMajorType(type) //
+ .setNamePart(path.getAsNamePart());
+ }
+
+ public void addChild(MaterializedField field){
+ children.add(field);
}
public MaterializedField clone(FieldReference ref){
- return create(ref, def.getMajorType());
+ return create(ref, type);
+ }
+
+ public String getLastName(){
+ PathSegment seg = path.getRootSegment();
+ while(seg.getChild() != null) seg = seg.getChild();
+ return seg.getNameSegment().getPath();
+ }
+
+
+ // TODO: rewrite without as direct match rather than conversion then match.
+ public boolean matches(SerializedField field){
+ MaterializedField f = create(field);
+ return f.equals(this);
}
public static MaterializedField create(String path, MajorType type){
@@ -58,43 +77,20 @@ public class MaterializedField{
}
public static MaterializedField create(SchemaPath path, MajorType type) {
- FieldDef.Builder b = FieldDef.newBuilder();
- b.setMajorType(type);
- addSchemaPathToFieldDef(path, b);
- return create(b.build());
- }
-
- private static void addSchemaPathToFieldDef(SchemaPath path, FieldDef.Builder builder) {
- for (PathSegment p = path.getRootSegment();; p = p.getChild()) {
- NamePart.Builder b = NamePart.newBuilder();
- if (p.isArray()) {
- b.setType(Type.ARRAY);
- } else {
- b.setName(p.getNameSegment().getPath().toString());
- b.setType(Type.NAME);
- }
- builder.addName(b.build());
- if(p.isLastPath()) break;
- }
+ return new MaterializedField(path, type);
}
- public FieldDef getDef() {
- return def;
+ public SchemaPath getPath(){
+ return path;
}
+ /**
+ * Get the schema path. Deprecated, use getPath() instead.
+ * @return the SchemaPath of this field.
+ */
+ @Deprecated
public SchemaPath getAsSchemaPath(){
- List<NamePart> nameList = Lists.newArrayList(def.getNameList());
- Collections.reverse(nameList);
- PathSegment seg = null;
- for(NamePart p : nameList){
- if(p.getType() == NamePart.Type.ARRAY){
- throw new UnsupportedOperationException();
- }else{
- seg = new NameSegment(p.getName(), seg);
- }
- }
- if( !(seg instanceof NameSegment) ) throw new UnsupportedOperationException();
- return new SchemaPath( (NameSegment) seg);
+ return path;
}
// public String getName(){
@@ -119,29 +115,29 @@ public class MaterializedField{
// }
public int getWidth() {
- return def.getMajorType().getWidth();
+ return type.getWidth();
}
public MajorType getType() {
- return def.getMajorType();
+ return type;
}
public int getScale() {
- return def.getMajorType().getScale();
+ return type.getScale();
}
public int getPrecision() {
- return def.getMajorType().getPrecision();
+ return type.getPrecision();
}
public boolean isNullable() {
- return def.getMajorType().getMode() == DataMode.OPTIONAL;
+ return type.getMode() == DataMode.OPTIONAL;
}
public DataMode getDataMode() {
- return def.getMajorType().getMode();
+ return type.getMode();
}
public MaterializedField getOtherNullableVersion(){
- MajorType mt = def.getMajorType();
+ MajorType mt = type;
DataMode newDataMode = null;
switch(mt.getMode()){
case OPTIONAL:
@@ -153,7 +149,7 @@ public class MaterializedField{
default:
throw new UnsupportedOperationException();
}
- return new MaterializedField(def.toBuilder().setMajorType(mt.toBuilder().setMode(newDataMode).build()).build());
+ return new MaterializedField(path, mt.toBuilder().setMode(newDataMode).build());
}
public Class<?> getValueClass() {
@@ -161,33 +157,19 @@ public class MaterializedField{
}
public boolean matches(SchemaPath path) {
- Iterator<NamePart> iter = def.getNameList().iterator();
+ if(!path.isSimplePath()) return false;
- for (PathSegment p = path.getRootSegment();; p = p.getChild()) {
- if(p == null) break;
- if (!iter.hasNext()) return false;
- NamePart n = iter.next();
-
- if (p.isArray()) {
- if (n.getType() == Type.ARRAY) continue;
- return false;
- } else {
- if (p.getNameSegment().getPath().equalsIgnoreCase(n.getName())) continue;
- return false;
- }
-
- }
- // we've reviewed all path segments. confirm that we don't have any extra name parts.
- return !iter.hasNext();
+ return this.path.equals(path);
}
-
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
- result = prime * result + ((def == null) ? 0 : def.hashCode());
+ result = prime * result + ((children == null) ? 0 : children.hashCode());
+ result = prime * result + ((path == null) ? 0 : path.hashCode());
+ result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@@ -200,20 +182,30 @@ public class MaterializedField{
if (getClass() != obj.getClass())
return false;
MaterializedField other = (MaterializedField) obj;
- if (def == null) {
- if (other.def != null)
+ if (children == null) {
+ if (other.children != null)
+ return false;
+ } else if (!children.equals(other.children))
+ return false;
+ if (path == null) {
+ if (other.path != null)
+ return false;
+ } else if (!path.equals(other.path))
+ return false;
+ if (type == null) {
+ if (other.type != null)
return false;
- } else if (!def.equals(other.def))
+ } else if (!type.equals(other.type))
return false;
return true;
}
@Override
public String toString() {
- return "MaterializedField [" + def.toString() + "]";
+ return "MaterializedField [path=" + path + ", type=" + type + "]";
}
public String toExpr(){
- return this.getAsSchemaPath().toExpr();
+ return path.toExpr();
}
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatch.java
index 31283c6..60fdd4d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatch.java
@@ -27,7 +27,7 @@ import org.apache.drill.exec.vector.ValueVector;
* A record batch contains a set of field values for a particular range of records. In the case of a record batch
* composed of ValueVectors, ideally a batch fits within L2 cache (~256k per core). The set of value vectors do not
* change unless the next() IterOutcome is a *_NEW_SCHEMA type.
- *
+ *
* A key thing to know is that the Iterator provided by record batch must align with the rank positions of the field ids
* provided utilizing getValueVectorId();
*/
@@ -56,21 +56,21 @@ public interface RecordBatch extends VectorAccessible {
/**
* Access the FragmentContext of the current query fragment. Useful for reporting failure information or other query
* level information.
- *
+ *
* @return
*/
public FragmentContext getContext();
/**
* Provide the schema of the current RecordBatch. This changes if and only if a *_NEW_SCHEMA IterOutcome is provided.
- *
+ *
* @return
*/
public BatchSchema getSchema();
/**
* Provide the number of records that are within this record count
- *
+ *
* @return
*/
public int getRecordCount();
@@ -89,7 +89,7 @@ public interface RecordBatch extends VectorAccessible {
* Get the value vector type and id for the given schema path. The TypedFieldId should store a fieldId which is the
* same as the ordinal position of the field within the Iterator provided this classes implementation of
* Iterable<ValueVector>.
- *
+ *
* @param path
* The path where the vector should be located.
* @return The local field id associated with this vector. If no field matches this path, this will return a null
@@ -97,24 +97,24 @@ public interface RecordBatch extends VectorAccessible {
*/
public abstract TypedFieldId getValueVectorId(SchemaPath path);
@Override
- public abstract VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz);
+ public abstract VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids);
/**
* Update the data in each Field reading interface for the next range of records. Once a RecordBatch returns an
* IterOutcome.NONE, the consumer should no longer next(). Behavior at this point is undetermined and likely to throw
* an exception.
- *
+ *
* @return An IterOutcome describing the result of the iteration.
*/
public IterOutcome next();
/**
* Get a writable version of this batch. Takes over owernship of existing buffers.
- *
+ *
* @return
*/
public WritableBatch getWritableBatch();
-
+
public void cleanup();
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
index ed450af..10d959f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/RecordBatchLoader.java
@@ -23,13 +23,14 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import javax.jdo.metadata.FieldMetadata;
+
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.memory.BufferAllocator;
-import org.apache.drill.exec.proto.SchemaDefProtos.FieldDef;
-import org.apache.drill.exec.proto.UserBitShared.FieldMetadata;
import org.apache.drill.exec.proto.UserBitShared.RecordBatchDef;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.vector.ValueVector;
import com.google.common.collect.Maps;
@@ -63,25 +64,24 @@ public class RecordBatchLoader implements VectorAccessible, Iterable<VectorWrapp
boolean schemaChanged = schema == null;
// logger.info("Load, ThreadID: {}", Thread.currentThread().getId(), new RuntimeException("For Stack Trace Only"));
// System.out.println("Load, ThreadId: " + Thread.currentThread().getId());
- Map<FieldDef, ValueVector> oldFields = Maps.newHashMap();
+ Map<MaterializedField, ValueVector> oldFields = Maps.newHashMap();
for(VectorWrapper<?> w : container){
ValueVector v = w.getValueVector();
- oldFields.put(v.getField().getDef(), v);
+ oldFields.put(v.getField(), v);
}
VectorContainer newVectors = new VectorContainer();
- List<FieldMetadata> fields = def.getFieldList();
+ List<SerializedField> fields = def.getFieldList();
int bufOffset = 0;
- for (FieldMetadata fmd : fields) {
- FieldDef fieldDef = fmd.getDef();
+ for (SerializedField fmd : fields) {
+ MaterializedField fieldDef = MaterializedField.create(fmd);
ValueVector v = oldFields.remove(fieldDef);
if(v == null) {
// if we arrive here, we didn't have a matching vector.
schemaChanged = true;
- MaterializedField m = new MaterializedField(fieldDef);
- v = TypeHelper.getNewVector(m, allocator);
+ v = TypeHelper.getNewVector(fieldDef, allocator);
}
if (fmd.getValueCount() == 0){
v.clear();
@@ -136,8 +136,8 @@ public class RecordBatchLoader implements VectorAccessible, Iterable<VectorWrapp
return valueCount;
}
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz){
- return container.getValueAccessorById(fieldId, clazz);
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids){
+ return container.getValueAccessorById(clazz, ids);
}
public WritableBatch getWritableBatch(){
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
index b7a8248..692fe62 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java
@@ -17,11 +17,15 @@
*/
package org.apache.drill.exec.record;
+import org.apache.drill.common.expression.PathSegment;
+import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.complex.AbstractContainerVector;
+import org.apache.drill.exec.vector.complex.MapVector;
public class SimpleVectorWrapper<T extends ValueVector> implements VectorWrapper<T>{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SimpleVectorWrapper.class);
-
+
private T v;
public SimpleVectorWrapper(T v){
@@ -53,8 +57,7 @@ public class SimpleVectorWrapper<T extends ValueVector> implements VectorWrapper
public boolean isHyper() {
return false;
}
-
-
+
@SuppressWarnings("unchecked")
@Override
public VectorWrapper<T> cloneAndTransfer() {
@@ -71,4 +74,56 @@ public class SimpleVectorWrapper<T extends ValueVector> implements VectorWrapper
public static <T extends ValueVector> SimpleVectorWrapper<T> create(T v){
return new SimpleVectorWrapper<T>(v);
}
+
+
+ @Override
+ public VectorWrapper<?> getChildWrapper(int[] ids) {
+ if(ids.length == 1) return this;
+
+ ValueVector vector = v;
+
+ for(int i = 1; i < ids.length; i++){
+ MapVector map = (MapVector) vector;
+ vector = map.getVectorById(ids[i]);
+ }
+
+ return new SimpleVectorWrapper<ValueVector>(vector);
+ }
+
+ @Override
+ public TypedFieldId getFieldIdIfMatches(int id, SchemaPath expectedPath) {
+ if(!expectedPath.getRootSegment().segmentEquals(v.getField().getPath().getRootSegment())) return null;
+ PathSegment seg = expectedPath.getRootSegment();
+
+ if(v instanceof AbstractContainerVector){
+ // we're looking for a multi path.
+ AbstractContainerVector c = (AbstractContainerVector) v;
+ TypedFieldId.Builder builder = TypedFieldId.newBuilder();
+ builder.intermediateType(v.getField().getType());
+ builder.addId(id);
+ return c.getFieldIdIfMatches(builder, true, expectedPath.getRootSegment().getChild());
+
+ }else{
+ TypedFieldId.Builder builder = TypedFieldId.newBuilder();
+ builder.intermediateType(v.getField().getType());
+ builder.addId(id);
+ builder.finalType(v.getField().getType());
+ if(seg.isLastPath()){
+ return builder.build();
+ }else{
+ PathSegment child = seg.getChild();
+ if(child.isArray() && child.isLastPath()){
+ builder.remainder(child);
+ builder.withIndex();
+ return builder.build();
+ }else{
+ return null;
+ }
+
+ }
+
+ }
+ }
+
+
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/TransferPair.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/TransferPair.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/TransferPair.java
index ba2c7b2..9645be9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/TransferPair.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/TransferPair.java
@@ -23,5 +23,5 @@ public interface TransferPair {
public void transfer();
public void splitAndTransfer(int startIndex, int length);
public ValueVector getTo();
- public void copyValue(int from, int to);
+ public boolean copyValueSafe(int from, int to);
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java
index 0fbd0ae..24a8251 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java
@@ -17,34 +17,174 @@
*/
package org.apache.drill.exec.record;
+import java.util.Arrays;
+
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.drill.common.expression.PathSegment;
+import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.exec.expr.TypeHelper;
+import org.apache.drill.exec.vector.ValueVector;
+
+import com.carrotsearch.hppc.IntArrayList;
+import com.google.common.base.Preconditions;
public class TypedFieldId {
- final MajorType type;
- final int fieldId;
+ final MajorType finalType;
+ final MajorType secondaryFinal;
+ final MajorType intermediateType;
+ final int[] fieldIds;
final boolean isHyperReader;
+ final PathSegment remainder;
+
+ public TypedFieldId(MajorType type, int... fieldIds){
+ this(type, type, type, false, null, fieldIds);
+ }
- public TypedFieldId(MajorType type, int fieldId){
- this(type, fieldId, false);
+ public TypedFieldId(MajorType type, IntArrayList breadCrumb, PathSegment remainder){
+ this(type, type, type, false, remainder, breadCrumb.toArray());
}
-
- public TypedFieldId(MajorType type, int fieldId, boolean isHyper) {
+
+ public TypedFieldId(MajorType type, boolean isHyper, int... fieldIds){
+ this(type, type, type, isHyper, null, fieldIds);
+ }
+
+ public TypedFieldId(MajorType intermediateType, MajorType secondaryFinal, MajorType finalType, boolean isHyper, PathSegment remainder, int... fieldIds) {
super();
- this.type = type;
- this.fieldId = fieldId;
+ this.intermediateType = intermediateType;
+ this.finalType = finalType;
+ this.secondaryFinal = secondaryFinal;
+ this.fieldIds = fieldIds;
this.isHyperReader = isHyper;
+ this.remainder = remainder;
+ }
+
+
+
+ public TypedFieldId cloneWithChild(int id){
+ int[] fieldIds = ArrayUtils.add(this.fieldIds, id);
+ return new TypedFieldId(intermediateType, secondaryFinal, finalType, isHyperReader, remainder, fieldIds);
+ }
+
+ public PathSegment getLastSegment(){
+ if(remainder == null) return null;
+ PathSegment seg = remainder;
+ while(seg.getChild() != null){
+ seg = seg.getChild();
+ }
+ return seg;
+ }
+
+ public TypedFieldId cloneWithRemainder(PathSegment remainder){
+ return new TypedFieldId(intermediateType, secondaryFinal, finalType, isHyperReader, remainder, fieldIds);
+ }
+
+ public boolean hasRemainder(){
+ return remainder != null;
+ }
+
+ public PathSegment getRemainder(){
+ return remainder;
}
public boolean isHyperReader(){
return isHyperReader;
}
-
- public MajorType getType() {
- return type;
+
+ public MajorType getIntermediateType() {
+ return intermediateType;
+ }
+
+ public Class<? extends ValueVector> getIntermediateClass(){
+ return (Class<? extends ValueVector>) TypeHelper.getValueVectorClass(intermediateType.getMinorType(), intermediateType.getMode());
+ }
+
+ public MajorType getFinalType(){
+ return finalType;
}
- public int getFieldId() {
- return fieldId;
+ public int[] getFieldIds() {
+ return fieldIds;
+ }
+
+
+
+ public MajorType getSecondaryFinal() {
+ return secondaryFinal;
+ }
+
+ public static Builder newBuilder(){
+ return new Builder();
+ }
+
+ public static class Builder{
+ final IntArrayList ids = new IntArrayList();
+ MajorType finalType;
+ MajorType intermediateType;
+ PathSegment remainder;
+ boolean hyperReader = false;
+ boolean withIndex = false;
+
+ public Builder addId(int id){
+ ids.add(id);
+ return this;
+ }
+
+ public Builder withIndex(){
+ withIndex = true;
+ return this;
+ }
+
+ public Builder remainder(PathSegment remainder){
+ this.remainder = remainder;
+ return this;
+ }
+
+ public Builder hyper(){
+ this.hyperReader = true;
+ return this;
+ }
+
+ public Builder finalType(MajorType finalType){
+ this.finalType = finalType;
+ return this;
+ }
+
+ public Builder intermediateType(MajorType intermediateType){
+ this.intermediateType = intermediateType;
+ return this;
+ }
+
+ public TypedFieldId build(){
+ Preconditions.checkNotNull(intermediateType);
+ Preconditions.checkNotNull(finalType);
+
+ if(intermediateType == null) intermediateType = finalType;
+ MajorType actualFinalType = finalType;
+ MajorType secondaryFinal = finalType;
+
+ // if this has an index, switch to required type for output
+ if(withIndex && intermediateType == finalType) actualFinalType = finalType.toBuilder().setMode(DataMode.REQUIRED).build();
+
+ // if this isn't a direct access, switch the final type to nullable as offsets may be null.
+ // TODO: there is a bug here with some things.
+ if(intermediateType != finalType) actualFinalType = finalType.toBuilder().setMode(DataMode.OPTIONAL).build();
+
+ return new TypedFieldId(intermediateType, secondaryFinal, actualFinalType, hyperReader, remainder, ids.toArray());
+ }
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + Arrays.hashCode(fieldIds);
+ result = prime * result + ((finalType == null) ? 0 : finalType.hashCode());
+ result = prime * result + ((intermediateType == null) ? 0 : intermediateType.hashCode());
+ result = prime * result + (isHyperReader ? 1231 : 1237);
+ result = prime * result + ((remainder == null) ? 0 : remainder.hashCode());
+ result = prime * result + ((secondaryFinal == null) ? 0 : secondaryFinal.hashCode());
+ return result;
}
@Override
@@ -56,20 +196,32 @@ public class TypedFieldId {
if (getClass() != obj.getClass())
return false;
TypedFieldId other = (TypedFieldId) obj;
- if (fieldId != other.fieldId)
+ if (!Arrays.equals(fieldIds, other.fieldIds))
return false;
- if (type == null) {
- if (other.type != null)
+ if (finalType == null) {
+ if (other.finalType != null)
return false;
- } else if (!type.equals(other.type))
+ } else if (!finalType.equals(other.finalType))
+ return false;
+ if (intermediateType == null) {
+ if (other.intermediateType != null)
+ return false;
+ } else if (!intermediateType.equals(other.intermediateType))
+ return false;
+ if (isHyperReader != other.isHyperReader)
+ return false;
+ if (remainder == null) {
+ if (other.remainder != null)
+ return false;
+ } else if (!remainder.equals(other.remainder))
+ return false;
+ if (secondaryFinal == null) {
+ if (other.secondaryFinal != null)
+ return false;
+ } else if (!secondaryFinal.equals(other.secondaryFinal))
return false;
return true;
}
- @Override
- public String toString() {
- return "TypedFieldId [type=" + type + ", fieldId=" + fieldId + ", isSuperReader=" + isHyperReader + "]";
- }
-
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorAccessible.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorAccessible.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorAccessible.java
index a8100b2..474a0a6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorAccessible.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorAccessible.java
@@ -28,7 +28,7 @@ import org.apache.drill.exec.vector.ValueVector;
* To change this template use File | Settings | File Templates.
*/
public interface VectorAccessible extends Iterable<VectorWrapper<?>> {
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz);
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... fieldIds);
public TypedFieldId getValueVectorId(SchemaPath path);
public BatchSchema getSchema();
public int getRecordCount();
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
index 25289a8..1c7714e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java
@@ -22,13 +22,15 @@ import java.util.Iterator;
import java.util.List;
import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.complex.AbstractMapVector;
import com.beust.jcommander.internal.Lists;
import com.google.common.base.Preconditions;
-public class VectorContainer implements Iterable<VectorWrapper<?>>, VectorAccessible {
+public class VectorContainer extends AbstractMapVector implements Iterable<VectorWrapper<?>>, VectorAccessible {
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(VectorContainer.class);
protected final List<VectorWrapper<?>> wrappers = Lists.newArrayList();
@@ -61,6 +63,10 @@ public class VectorContainer implements Iterable<VectorWrapper<?>>, VectorAccess
add(vv, releasable);
}
+ public <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz){
+ return null;
+ }
+
/**
* Get a set of transferred clones of this container. Note that this guarantees that the vectors in the cloned
* container have the same TypedFieldIds as the existing container, allowing interchangeability in generated code. In
@@ -94,7 +100,7 @@ public class VectorContainer implements Iterable<VectorWrapper<?>>, VectorAccess
schema = null;
int i = wrappers.size();
wrappers.add(SimpleVectorWrapper.create(vv));
- return new TypedFieldId(vv.getField().getType(), i, false);
+ return new TypedFieldId(vv.getField().getType(), i);
}
public void add(ValueVector[] hyperVector) {
@@ -129,29 +135,33 @@ public class VectorContainer implements Iterable<VectorWrapper<?>>, VectorAccess
public TypedFieldId getValueVectorId(SchemaPath path) {
for (int i = 0; i < wrappers.size(); i++) {
VectorWrapper<?> va = wrappers.get(i);
- SchemaPath w = va.getField().getAsSchemaPath();
- if (w.equals(path)){
- return new TypedFieldId(va.getField().getType(), i, va.isHyper());
+ TypedFieldId id = va.getFieldIdIfMatches(i, path);
+ if(id != null){
+ return id;
}
}
- if(path.getRootSegment().isNamed() && path.getRootSegment().getNameSegment().getPath().equals("_MAP") && path.getRootSegment().isLastPath()) throw new UnsupportedOperationException("Drill does not yet support map references.");
return null;
}
+
+
@Override
- public VectorWrapper<?> getValueAccessorById(int fieldId, Class<?> clazz) {
- VectorWrapper<?> va = wrappers.get(fieldId);
- if(va!= null && clazz == null){
- return (VectorWrapper<?>) va;
- }
- if (va != null && va.getVectorClass() != clazz) {
+ public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... fieldIds) {
+ Preconditions.checkArgument(fieldIds.length >= 1);
+ VectorWrapper<?> va = wrappers.get(fieldIds[0]);
+
+ if(va == null) return null;
+
+ if (fieldIds.length == 1 && clazz != null && !clazz.isAssignableFrom(va.getVectorClass())) {
throw new IllegalStateException(String.format(
"Failure while reading vector. Expected vector class of %s but was holding vector class %s.",
clazz.getCanonicalName(), va.getVectorClass().getCanonicalName()));
}
- return (VectorWrapper<?>) va;
+
+ return (VectorWrapper<?>) va.getChildWrapper(fieldIds);
+
}
public BatchSchema getSchema() {
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorWrapper.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorWrapper.java
index 401b50e..dc8ffe5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorWrapper.java
@@ -17,11 +17,14 @@
*/
package org.apache.drill.exec.record;
+import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.vector.ValueVector;
public interface VectorWrapper<T extends ValueVector> {
+
+
public Class<T> getVectorClass();
public MaterializedField getField();
public T getValueVector();
@@ -29,4 +32,11 @@ public interface VectorWrapper<T extends ValueVector> {
public boolean isHyper();
public void clear();
public VectorWrapper<T> cloneAndTransfer();
+ public VectorWrapper<?> getChildWrapper(int[] ids);
+
+ /**
+ * Traverse the object graph and determine whether the provided SchemaPath matches data within the Wrapper. If so, return a TypedFieldId associated with this path.
+ * @return TypedFieldId
+ */
+ public TypedFieldId getFieldIdIfMatches(int id, SchemaPath expectedPath);
}
[06/10] Add support for RepeatedMapVector,
MapVector and RepeatedListVector.
Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java
new file mode 100644
index 0000000..2492cc8
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/RepeatedMapVector.java
@@ -0,0 +1,478 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex;
+
+import io.netty.buffer.ByteBuf;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.drill.common.expression.FieldReference;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.types.TypeProtos.DataMode;
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.expr.TypeHelper;
+import org.apache.drill.exec.expr.holders.ComplexHolder;
+import org.apache.drill.exec.expr.holders.RepeatedMapHolder;
+import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.memory.OutOfMemoryRuntimeException;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
+import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.record.TransferPair;
+import org.apache.drill.exec.vector.RepeatedFixedWidthVector;
+import org.apache.drill.exec.vector.UInt4Vector;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.complex.impl.NullReader;
+import org.apache.drill.exec.vector.complex.impl.RepeatedMapReaderImpl;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+
+import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
+public class RepeatedMapVector extends AbstractContainerVector implements RepeatedFixedWidthVector {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RepeatedMapVector.class);
+
+ public final static MajorType TYPE = MajorType.newBuilder().setMinorType(MinorType.MAP).setMode(DataMode.REPEATED).build();
+
+ private final UInt4Vector offsets; // offsets to start of each record
+ private final Map<String, ValueVector> vectors = Maps.newHashMap();
+ private final Map<String, VectorWithOrdinal> vectorIds = Maps.newHashMap();
+ private final RepeatedMapReaderImpl reader = new RepeatedMapReaderImpl(RepeatedMapVector.this);
+ private final IntObjectOpenHashMap<ValueVector> vectorsById = new IntObjectOpenHashMap<>();
+ private final Accessor accessor = new Accessor();
+ private final Mutator mutator = new Mutator();
+ private final BufferAllocator allocator;
+ private final MaterializedField field;
+ private int lastSet = 0;
+
+ public RepeatedMapVector(MaterializedField field, BufferAllocator allocator){
+ this.field = field;
+ this.allocator = allocator;
+ this.offsets = new UInt4Vector(null, allocator);
+
+ }
+
+ @Override
+ public void allocateNew(int parentValueCount, int childValueCount) {
+ clear();
+ offsets.allocateNew(parentValueCount+1);
+ mutator.reset();
+ accessor.reset();
+ }
+
+ public Iterator<String> fieldNameIterator(){
+ return vectors.keySet().iterator();
+ }
+
+ public int size(){
+ return vectors.size();
+ }
+
+ @Override
+ public <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz) {
+ ValueVector v = vectors.get(name);
+
+ if(v == null){
+ v = TypeHelper.getNewVector(field.getPath(), name, allocator, type);
+ Preconditions.checkNotNull(v, String.format("Failure to create vector of type %s.", type));
+ put(name, v);
+ }
+ return typeify(v, clazz);
+
+ }
+
+ @Override
+ public <T extends ValueVector> T get(String name, Class<T> clazz) {
+ ValueVector v = vectors.get(name);
+ if(v == null) throw new IllegalStateException(String.format("Attempting to access invalid map field of name %s.", name));
+ return typeify(v, clazz);
+ }
+
+ @Override
+ public int getBufferSize() {
+ if(accessor.getValueCount() == 0 || vectors.isEmpty()) return 0;
+ long buffer = offsets.getBufferSize();
+ for(ValueVector v : this){
+ buffer += v.getBufferSize();
+ }
+
+ return (int) buffer;
+ }
+
+ @Override
+ public void close() {
+ for(ValueVector v : this){
+ v.close();
+ }
+ }
+
+ @Override
+ public Iterator<ValueVector> iterator() {
+ return vectors.values().iterator();
+ }
+
+ @Override
+ public MaterializedField getField() {
+ return field;
+ }
+
+ @Override
+ public TransferPair getTransferPair() {
+ return new MapTransferPair(field.getPath());
+ }
+
+ @Override
+ public TransferPair makeTransferPair(ValueVector to) {
+ return new MapTransferPair( (RepeatedMapVector) to);
+ }
+
+ MapSingleCopier makeSingularCopier(MapVector to){
+ return new MapSingleCopier(to);
+ }
+
+
+ class MapSingleCopier{
+ private final TransferPair[] pairs;
+ final RepeatedMapVector from = RepeatedMapVector.this;
+
+ public MapSingleCopier(MapVector to){
+ pairs = new TransferPair[vectors.size()];
+ int i =0;
+ for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ int preSize = to.vectors.size();
+ ValueVector v = to.addOrGet(e.getKey(), e.getValue().getField().getType(), e.getValue().getClass());
+ if(to.vectors.size() != preSize) v.allocateNew();
+ pairs[i++] = e.getValue().makeTransferPair(v);
+ }
+ }
+
+ public boolean copySafe(int fromSubIndex, int toIndex){
+ for(TransferPair p : pairs){
+ if(!p.copyValueSafe(fromSubIndex, toIndex)) return false;
+ }
+ return true;
+ }
+ }
+
+ @Override
+ public TransferPair getTransferPair(FieldReference ref) {
+ return new MapTransferPair(ref);
+ }
+
+ @Override
+ public void allocateNew() throws OutOfMemoryRuntimeException {
+ if(!allocateNewSafe()) throw new OutOfMemoryRuntimeException();
+ }
+
+ @Override
+ public boolean allocateNewSafe() {
+ if(!offsets.allocateNewSafe()) return false;
+ for(ValueVector v : vectors.values()){
+ if(!v.allocateNewSafe()) return false;
+ }
+ return true;
+ }
+
+ private class MapTransferPair implements TransferPair{
+
+ private final TransferPair[] pairs;
+ private final RepeatedMapVector to;
+ private final RepeatedMapVector from = RepeatedMapVector.this;
+
+ public MapTransferPair(SchemaPath path){
+ RepeatedMapVector v = new RepeatedMapVector(MaterializedField.create(path, TYPE), allocator);
+ pairs = new TransferPair[vectors.size()];
+ int i =0;
+ for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ TransferPair otherSide = e.getValue().getTransferPair();
+ v.put(e.getKey(), otherSide.getTo());
+ pairs[i++] = otherSide;
+ }
+ this.to = v;
+ }
+
+ public MapTransferPair(RepeatedMapVector to){
+ this.to = to;
+ pairs = new TransferPair[vectors.size()];
+ int i =0;
+ for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ ValueVector v = to.addOrGet(e.getKey(), e.getValue().getField().getType(), e.getValue().getClass());
+ pairs[i++] = e.getValue().makeTransferPair(v);
+ }
+ }
+
+
+ @Override
+ public void transfer() {
+ offsets.transferTo(to.offsets);
+ for(TransferPair p : pairs){
+ p.transfer();
+ }
+ clear();
+ }
+
+ @Override
+ public ValueVector getTo() {
+ return to;
+ }
+
+ @Override
+ public boolean copyValueSafe(int from, int to) {
+ RepeatedMapHolder holder = new RepeatedMapHolder();
+ accessor.get(from, holder);
+ int newIndex = this.to.offsets.getAccessor().get(to);
+ //todo: make these bulk copies
+ for(int i = holder.start; i < holder.end; i++, newIndex++){
+ for(TransferPair p : pairs){
+ if(!p.copyValueSafe(from, to)) return false;
+ }
+ }
+ if(!this.to.offsets.getMutator().setSafe(to, newIndex)) return false;
+ return true;
+ }
+
+ @Override
+ public void splitAndTransfer(int startIndex, int length) {
+ throw new UnsupportedOperationException();
+ }
+
+ }
+
+
+ transient private MapTransferPair ephPair;
+
+ public boolean copyFromSafe(int fromIndex, int thisIndex, RepeatedMapVector from){
+ if(ephPair == null || ephPair.from != from){
+ ephPair = (MapTransferPair) from.makeTransferPair(this);
+ }
+ return ephPair.copyValueSafe(fromIndex, thisIndex);
+ }
+
+ @Override
+ public int getValueCapacity() {
+ return offsets.getValueCapacity();
+ }
+
+ @Override
+ public Accessor getAccessor() {
+ return accessor;
+ }
+
+ @Override
+ public ByteBuf[] getBuffers() {
+ List<ByteBuf> bufs = Lists.newArrayList(offsets.getBuffers());
+
+ for(ValueVector v : vectors.values()){
+ for(ByteBuf b : v.getBuffers()){
+ bufs.add(b);
+ }
+ }
+ return bufs.toArray(new ByteBuf[bufs.size()]);
+ }
+
+
+ @Override
+ public void load(SerializedField metadata, ByteBuf buf) {
+ List<SerializedField> fields = metadata.getChildList();
+
+ int bufOffset = offsets.load(metadata.getValueCount()+1, buf);
+
+ for (SerializedField fmd : fields) {
+ MaterializedField fieldDef = MaterializedField.create(fmd);
+
+ ValueVector v = vectors.get(fieldDef.getLastName());
+ if(v == null) {
+ // if we arrive here, we didn't have a matching vector.
+
+ v = TypeHelper.getNewVector(fieldDef, allocator);
+ }
+ if (fmd.getValueCount() == 0){
+ v.clear();
+ } else {
+ v.load(fmd, buf.slice(bufOffset, fmd.getBufferLength()));
+ }
+ bufOffset += fmd.getBufferLength();
+ put(fieldDef.getLastName(), v);
+ }
+ }
+
+ @Override
+ public SerializedField getMetadata() {
+ SerializedField.Builder b = getField() //
+ .getAsBuilder() //
+ .setBufferLength(getBufferSize()) //
+ .setValueCount(accessor.getValueCount());
+
+ for(ValueVector v : vectors.values()){
+ b.addChild(v.getMetadata());
+ }
+ return b.build();
+ }
+
+ protected void put(String name, ValueVector vv){
+ int ordinal = vectors.size();
+ if(vectors.put(name, vv) != null){
+ throw new IllegalStateException();
+ }
+ vectorIds.put(name, new VectorWithOrdinal(vv, ordinal));
+ vectorsById.put(ordinal, vv);
+ field.addChild(vv.getField());
+ }
+
+
+ @Override
+ public Mutator getMutator() {
+ return mutator;
+ }
+
+ public class Accessor implements ValueVector.Accessor{
+
+ @Override
+ public Object getObject(int index) {
+ List<Object> l = Lists.newArrayList();
+ int end = offsets.getAccessor().get(index+1);
+ for(int i = offsets.getAccessor().get(index); i < end; i++){
+ Map<String, Object> vv = Maps.newHashMap();
+ for(Map.Entry<String, ValueVector> e : vectors.entrySet()){
+ ValueVector v = e.getValue();
+ String k = e.getKey();
+ Object value = v.getAccessor().getObject(i);
+ if(value != null){
+ vv.put(k,value);
+ }
+ }
+ l.add(vv);
+ }
+ return l;
+ }
+
+ @Override
+ public int getValueCount() {
+ return offsets.getAccessor().getValueCount() - 1;
+ }
+
+ public void get(int index, RepeatedMapHolder holder){
+ assert index <= getValueCapacity();
+ holder.start = offsets.getAccessor().get(index);
+ holder.end = offsets.getAccessor().get(index+1);
+ }
+
+ public void get(int index, ComplexHolder holder){
+ FieldReader reader = getReader();
+ reader.setPosition(index);
+ holder.reader = reader;
+ }
+
+ public void get(int index, int arrayIndex, ComplexHolder holder){
+ RepeatedMapHolder h = new RepeatedMapHolder();
+ get(index, h);
+ int offset = h.start + arrayIndex;
+
+ if(offset >= h.end){
+ holder.reader = NullReader.INSTANCE;
+ }else{
+ reader.setSinglePosition(index, arrayIndex);
+ holder.reader = reader;
+ }
+ }
+
+ @Override
+ public boolean isNull(int index) {
+ return false;
+ }
+
+ @Override
+ public void reset() {
+ }
+
+ @Override
+ public FieldReader getReader() {
+ return reader;
+ }
+
+ }
+
+ private void populateEmpties(int groupCount){
+ int previousEnd = offsets.getAccessor().get(lastSet + 1);
+ for(int i = lastSet + 2; i <= groupCount; i++){
+ offsets.getMutator().setSafe(i, previousEnd);
+ }
+ lastSet = groupCount - 1;
+ }
+
+ public class Mutator implements ValueVector.Mutator{
+
+ public void startNewGroup(int index) {
+ populateEmpties(index);
+ lastSet = index;
+ offsets.getMutator().set(index+1, offsets.getAccessor().get(index));
+ }
+
+ public int add(int index){
+ int nextOffset = offsets.getAccessor().get(index+1);
+ boolean success = offsets.getMutator().setSafe(index+1, nextOffset+1);
+ if(!success) return -1;
+ return nextOffset;
+ }
+
+ @Override
+ public void setValueCount(int groupCount) {
+ populateEmpties(groupCount);
+ offsets.getMutator().setValueCount(groupCount+1);
+ int valueCount = offsets.getAccessor().get(groupCount);
+ for(ValueVector v : vectors.values()){
+ v.getMutator().setValueCount(valueCount);
+ }
+ }
+
+ @Override
+ public void reset() {
+ lastSet = 0;
+ }
+
+ @Override
+ public void generateTestData(int values) {
+ }
+
+ }
+
+ @Override
+ public void clear() {
+ lastSet = 0;
+ offsets.clear();
+ for(ValueVector v : vectors.values()){
+ v.clear();;
+ }
+ }
+
+ @Override
+ public int load(int parentValueCount, int childValueCount, ByteBuf buf) {
+ throw new UnsupportedOperationException();
+ }
+
+
+ @Override
+ public VectorWithOrdinal getVectorWithOrdinal(String name) {
+ return vectorIds.get(name);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java
new file mode 100644
index 0000000..99f6010
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/StateTool.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex;
+
+import java.util.Arrays;
+
+public class StateTool {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StateTool.class);
+
+ public static <T extends Enum<?>> void check(T currentState, T... expectedStates){
+ for(T s : expectedStates){
+ if(s == currentState) return;
+ }
+ throw new IllegalArgumentException(String.format("Expected to be in one of these states %s but was actually in state %s", Arrays.toString(expectedStates), currentState));
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/WriteState.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/WriteState.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/WriteState.java
new file mode 100644
index 0000000..43dba65
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/WriteState.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex;
+
+import org.apache.drill.exec.vector.complex.writer.FieldWriter;
+
+
+public class WriteState {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(WriteState.class);
+
+ private FieldWriter failPoint;
+
+ public boolean isFailed(){
+ return failPoint != null;
+ }
+
+ public boolean isOk(){
+ return failPoint == null;
+ }
+
+ public void fail(FieldWriter w){
+ assert failPoint == null;
+ failPoint = w;
+
+// System.out.println("Fail Point " + failPoint);
+ }
+
+ public void reset(){
+ failPoint = null;
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
new file mode 100644
index 0000000..761bc79
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
@@ -0,0 +1,231 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.fn;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufInputStream;
+import io.netty.buffer.UnpooledByteBufAllocator;
+
+import java.io.IOException;
+import java.io.Reader;
+
+import org.apache.drill.exec.expr.holders.BigIntHolder;
+import org.apache.drill.exec.expr.holders.BitHolder;
+import org.apache.drill.exec.expr.holders.Float8Holder;
+import org.apache.drill.exec.expr.holders.VarCharHolder;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.core.JsonParser.Feature;
+import com.google.common.base.Charsets;
+
+public class JsonReader {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JsonReader.class);
+
+ public static enum WriteState {
+ WRITE_SUCCEED, WRITE_FAILED, NO_MORE
+ }
+
+ private final JsonFactory factory = new JsonFactory();
+ private ByteBufInputStream stream;
+ private long byteOffset;
+ private JsonRecordSplitter splitter;
+ private Reader reader;
+ private JsonParser parser;
+
+ public JsonReader(JsonRecordSplitter splitter) throws JsonParseException, IOException {
+ this.splitter = splitter;
+ factory.configure(Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
+ factory.configure(Feature.ALLOW_COMMENTS, true);
+ reader = splitter.getNextReader();
+ }
+
+ public WriteState write(ComplexWriter writer) throws JsonParseException, IOException {
+ if(reader == null){
+ reader = splitter.getNextReader();
+ if(reader == null) return WriteState.NO_MORE;
+
+ }
+
+ parser = factory.createJsonParser(reader);
+ reader.mark(1024*128);
+ JsonToken t = parser.nextToken();
+ while(!parser.hasCurrentToken()) t = parser.nextToken();
+
+
+ switch (t) {
+ case START_OBJECT:
+ writeData(writer.rootAsMap());
+ break;
+ case START_ARRAY:
+ writeData(writer.rootAsList());
+ break;
+ case NOT_AVAILABLE:
+ return WriteState.NO_MORE;
+ default:
+ throw new JsonParseException(
+ String.format("Failure while parsing JSON. Found token of [%s] Drill currently only supports parsing "
+ + "json strings that contain either lists or maps. The root object cannot be a scalar.",
+ t),
+ parser.getCurrentLocation());
+ }
+
+ if (!writer.ok()) {
+ reader.reset();
+ return WriteState.WRITE_FAILED;
+ } else {
+ reader = null;
+ return WriteState.WRITE_SUCCEED;
+ }
+ }
+
+
+ private void writeData(MapWriter map) throws JsonParseException, IOException {
+ //
+ map.start();
+ outside: while(true){
+ JsonToken t = parser.nextToken();
+ if(t == JsonToken.NOT_AVAILABLE || t == JsonToken.END_OBJECT) return;
+
+ assert t == JsonToken.FIELD_NAME : String.format("Expected FIELD_NAME but got %s.", t.name());
+ final String fieldName = parser.getText();
+
+
+ switch(parser.nextToken()){
+ case START_ARRAY:
+ writeData(map.list(fieldName));
+ break;
+ case START_OBJECT:
+ writeData(map.map(fieldName));
+ break;
+ case END_OBJECT:
+ break outside;
+
+ case VALUE_EMBEDDED_OBJECT:
+ case VALUE_FALSE: {
+ BitHolder h = new BitHolder();
+ h.value = 0;
+ map.bit(fieldName).write(h);
+ break;
+ }
+ case VALUE_TRUE: {
+ BitHolder h = new BitHolder();
+ h.value = 1;
+ map.bit(fieldName).write(h);
+ break;
+ }
+ case VALUE_NULL:
+ // do nothing as we don't have a type.
+ break;
+ case VALUE_NUMBER_FLOAT:
+ Float8Holder fh = new Float8Holder();
+ fh.value = parser.getDoubleValue();
+ map.float8(fieldName).write(fh);
+ break;
+ case VALUE_NUMBER_INT:
+ BigIntHolder bh = new BigIntHolder();
+ bh.value = parser.getLongValue();
+ map.bigInt(fieldName).write(bh);
+ break;
+ case VALUE_STRING:
+ VarCharHolder vh = new VarCharHolder();
+ String value = parser.getText();
+ byte[] b = value.getBytes(Charsets.UTF_8);
+ ByteBuf d = UnpooledByteBufAllocator.DEFAULT.buffer(b.length);
+ d.setBytes(0, b);
+ vh.buffer = d;
+ vh.start = 0;
+ vh.end = b.length;
+ map.varChar(fieldName).write(vh);
+ break;
+
+ default:
+ throw new IllegalStateException("Unexpected token " + parser.getCurrentToken());
+
+ }
+
+ }
+ map.end();
+
+ }
+
+ private void writeData(ListWriter list) throws JsonParseException, IOException {
+ list.start();
+ outside: while(true){
+
+ switch(parser.nextToken()){
+ case START_ARRAY:
+ writeData(list.list());
+ break;
+ case START_OBJECT:
+ writeData(list.map());
+ break;
+ case END_ARRAY:
+ case END_OBJECT:
+ break outside;
+
+ case VALUE_EMBEDDED_OBJECT:
+ case VALUE_FALSE:{
+ BitHolder h = new BitHolder();
+ h.value = 0;
+ list.bit().write(h);
+ break;
+ }
+ case VALUE_TRUE: {
+ BitHolder h = new BitHolder();
+ h.value = 1;
+ list.bit().write(h);
+ break;
+ }
+ case VALUE_NULL:
+ // do nothing as we don't have a type.
+ break;
+ case VALUE_NUMBER_FLOAT:
+ Float8Holder fh = new Float8Holder();
+ fh.value = parser.getDoubleValue();
+ list.float8().write(fh);
+ break;
+ case VALUE_NUMBER_INT:
+ BigIntHolder bh = new BigIntHolder();
+ bh.value = parser.getLongValue();
+ list.bigInt().write(bh);
+ break;
+ case VALUE_STRING:
+ VarCharHolder vh = new VarCharHolder();
+ String value = parser.getText();
+ byte[] b = value.getBytes(Charsets.UTF_8);
+ ByteBuf d = UnpooledByteBufAllocator.DEFAULT.buffer(b.length);
+ d.setBytes(0, b);
+ vh.buffer = d;
+ vh.start = 0;
+ vh.end = b.length;
+ list.varChar().write(vh);
+
+ default:
+ throw new IllegalStateException("Unexpected token " + parser.getCurrentToken());
+ }
+ }
+ list.end();
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonRecordSplitter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonRecordSplitter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonRecordSplitter.java
new file mode 100644
index 0000000..6f6e7af
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonRecordSplitter.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.fn;
+
+import java.io.IOException;
+import java.io.Reader;
+
+public interface JsonRecordSplitter {
+
+ public abstract Reader getNextReader() throws IOException;
+
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
new file mode 100644
index 0000000..0624ece
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonWriter.java
@@ -0,0 +1,258 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.fn;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+import org.apache.drill.common.types.TypeProtos.DataMode;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonGenerator;
+
+public class JsonWriter {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JsonWriter.class);
+
+ private final JsonFactory factory = new JsonFactory();
+ private final JsonGenerator gen;
+
+ public JsonWriter(OutputStream out, boolean pretty) throws IOException{
+ JsonGenerator writer = factory.createJsonGenerator(out);
+ gen = pretty ? writer.useDefaultPrettyPrinter() : writer;
+ }
+
+ public void write(FieldReader reader) throws JsonGenerationException, IOException{
+ writeValue(reader);
+ gen.flush();
+ }
+
+ private void writeValue(FieldReader reader) throws JsonGenerationException, IOException{
+ final DataMode m = reader.getType().getMode();
+ final MinorType mt = reader.getType().getMinorType();
+
+ switch(m){
+ case OPTIONAL:
+ if(!reader.isSet()){
+ gen.writeNull();
+ break;
+ }
+
+ case REQUIRED:
+
+
+ switch (mt) {
+ case FLOAT4:
+ gen.writeNumber(reader.readFloat());
+ break;
+ case FLOAT8:
+ gen.writeNumber(reader.readDouble());
+ break;
+ case INT:
+ Integer i = reader.readInteger();
+ if(i == null){
+ gen.writeNull();
+ }else{
+ gen.writeNumber(reader.readInteger());
+ }
+ break;
+ case SMALLINT:
+ gen.writeNumber(reader.readShort());
+ break;
+ case TINYINT:
+ gen.writeNumber(reader.readByte());
+ break;
+ case BIGINT:
+ Long l = reader.readLong();
+ if(l == null){
+ gen.writeNull();
+ }else{
+ gen.writeNumber(reader.readLong());
+ }
+
+ break;
+ case BIT:
+ gen.writeBoolean(reader.readBoolean());
+ break;
+
+ case DATE:
+ case TIME:
+ case TIMESTAMP:
+ case TIMESTAMPTZ:
+ gen.writeString(reader.readDateTime().toString());
+
+ case INTERVALYEAR:
+ case INTERVALDAY:
+ case INTERVAL:
+ gen.writeString(reader.readPeriod().toString());
+ break;
+ case DECIMAL28DENSE:
+ case DECIMAL28SPARSE:
+ case DECIMAL38DENSE:
+ case DECIMAL38SPARSE:
+ case DECIMAL9:
+ case DECIMAL18:
+ gen.writeNumber(reader.readBigDecimal());
+ break;
+
+ case LIST:
+ // this is a pseudo class, doesn't actually contain the real reader so we have to drop down.
+ writeValue(reader.reader());
+ break;
+ case MAP:
+ gen.writeStartObject();
+ for(String name : reader){
+ if(reader.isSet()){
+ gen.writeFieldName(name);
+ writeValue(reader.reader(name));
+ }
+ }
+ gen.writeEndObject();
+ break;
+ case NULL:
+ gen.writeNull();
+ break;
+
+ case VAR16CHAR:
+ gen.writeString(reader.readString());
+ break;
+ case VARBINARY:
+ gen.writeBinary(reader.readByteArray());
+ break;
+ case VARCHAR:
+ gen.writeString(reader.readText().toString());
+ break;
+
+ }
+ break;
+
+ case REPEATED:
+ gen.writeStartArray();
+ switch (mt) {
+ case FLOAT4:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeNumber(reader.readFloat(i));
+ }
+
+ break;
+ case FLOAT8:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeNumber(reader.readDouble(i));
+ }
+ break;
+ case INT:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeNumber(reader.readInteger(i));
+ }
+ break;
+ case SMALLINT:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeNumber(reader.readShort(i));
+ }
+ break;
+ case TINYINT:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeNumber(reader.readByte(i));
+ }
+ break;
+ case BIGINT:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeNumber(reader.readLong(i));
+ }
+ break;
+ case BIT:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeBoolean(reader.readBoolean(i));
+ }
+ break;
+
+ case DATE:
+ case TIME:
+ case TIMESTAMP:
+ case TIMESTAMPTZ:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeString(reader.readDateTime(i).toString());
+ }
+
+ case INTERVALYEAR:
+ case INTERVALDAY:
+ case INTERVAL:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeString(reader.readPeriod(i).toString());
+ }
+ break;
+ case DECIMAL28DENSE:
+ case DECIMAL28SPARSE:
+ case DECIMAL38DENSE:
+ case DECIMAL38SPARSE:
+ case DECIMAL9:
+ case DECIMAL18:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeNumber(reader.readBigDecimal(i));
+ }
+ break;
+
+ case LIST:
+ for(int i = 0; i < reader.size(); i++){
+ while(reader.next()){
+ writeValue(reader.reader());
+ }
+ }
+ break;
+ case MAP:
+ while(reader.next()){
+ gen.writeStartObject();
+ for(String name : reader){
+ FieldReader mapField = reader.reader(name);
+ if(mapField.isSet()){
+ gen.writeFieldName(name);
+ writeValue(mapField);
+ }
+ }
+ gen.writeEndObject();
+ }
+ break;
+ case NULL:
+ break;
+
+ case VAR16CHAR:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeString(reader.readString(i));
+ }
+ break;
+ case VARBINARY:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeBinary(reader.readByteArray(i));
+ }
+ break;
+ case VARCHAR:
+ for(int i = 0; i < reader.size(); i++){
+ gen.writeString(reader.readText(i).toString());
+ }
+ break;
+
+ }
+ gen.writeEndArray();
+ break;
+ }
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ReaderJSONRecordSplitter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ReaderJSONRecordSplitter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ReaderJSONRecordSplitter.java
new file mode 100644
index 0000000..0cdbf85
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/ReaderJSONRecordSplitter.java
@@ -0,0 +1,178 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.fn;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
+
+import com.google.common.io.CharStreams;
+
+public class ReaderJSONRecordSplitter implements JsonRecordSplitter {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ReaderJSONRecordSplitter.class);
+
+ private static final int OPEN_CBRACKET = '{';
+ private static final int OPEN_BRACKET = '[';
+ private static final int CLOSE_CBRACKET = '}';
+ private static final int CLOSE_BRACKET = ']';
+
+ private static final int SPACE = ' ';
+ private static final int TAB = '\t';
+ private static final int NEW_LINE = '\n';
+ private static final int FORM_FEED = '\f';
+ private static final int CR = '\r';
+
+ private long start = 0;
+ private Reader reader;
+
+ public ReaderJSONRecordSplitter(Reader reader){
+ this.reader = reader;
+ }
+
+ public ReaderJSONRecordSplitter(String str){
+ this.reader = new StringReader(str);
+ }
+
+ @Override
+ public Reader getNextReader() throws IOException{
+
+ boolean inCandidate = false;
+ boolean found = false;
+
+ reader.mark(128*1024);
+ long endOffset = start;
+ outside: while(true){
+ int c = reader.read();
+// System.out.println(b);
+ endOffset++;
+
+ if(c == -1){
+ if(inCandidate){
+ found = true;
+ }
+ break;
+ }
+
+ switch(c){
+ case CLOSE_BRACKET:
+ case CLOSE_CBRACKET:
+// System.out.print("c");
+ inCandidate = true;
+ break;
+ case OPEN_BRACKET:
+ case OPEN_CBRACKET:
+// System.out.print("o");
+ if(inCandidate){
+ found = true;
+ break outside;
+ }
+ break;
+
+ case SPACE:
+ case TAB:
+ case NEW_LINE:
+ case CR:
+ case FORM_FEED:
+// System.out.print(' ');
+ break;
+
+ default:
+// System.out.print('-');
+ inCandidate = false;
+ }
+ }
+
+ if(found){
+ long maxBytes = endOffset - 1 - start;
+ start = endOffset;
+ reader.reset();
+ return new LimitedReader(reader, (int) maxBytes);
+ }else{
+ return null;
+ }
+
+ }
+
+ private class LimitedReader extends Reader {
+
+ private final Reader incoming;
+ private final int maxBytes;
+ private int markedBytes = 0;
+ private int bytes = 0;
+
+ public LimitedReader(Reader in, int maxBytes) {
+ this.maxBytes = maxBytes;
+ this.incoming = in;
+ }
+
+ @Override
+ public int read() throws IOException {
+ if (bytes >= maxBytes){
+ return -1;
+ }else{
+ bytes++;
+ return incoming.read();
+ }
+
+
+ }
+
+
+ @Override
+ public void mark(int readAheadLimit) throws IOException {
+ incoming.mark(readAheadLimit);
+ markedBytes = bytes;
+ }
+
+ @Override
+ public void reset() throws IOException {
+ incoming.reset();
+ bytes = markedBytes;
+ }
+
+ @Override
+ public int read(char[] cbuf, int off, int len) throws IOException {
+ int outputLength = Math.min(len, maxBytes - bytes);
+ if(outputLength > 0){
+ incoming.read(cbuf, off, outputLength);
+ bytes += outputLength;
+ return outputLength;
+ }else{
+ return -1;
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ }
+
+ }
+
+ public static void main(String[] args) throws Exception{
+ String str = " { \"b\": \"hello\", \"c\": \"goodbye\", r: []}\n { \"b\": \"yellow\", \"c\": \"red\"}\n ";
+ JsonRecordSplitter splitter = new ReaderJSONRecordSplitter(new StringReader(str));
+ Reader obj = null;
+ System.out.println();
+
+ while( (obj = splitter.getNextReader()) != null){
+ System.out.println();
+ System.out.println(CharStreams.toString(obj));
+ System.out.println("===end obj===");
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/UTF8JsonRecordSplitter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/UTF8JsonRecordSplitter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/UTF8JsonRecordSplitter.java
new file mode 100644
index 0000000..e46e1bd
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/UTF8JsonRecordSplitter.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.fn;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+
+import com.google.common.base.Charsets;
+import com.google.common.io.CharStreams;
+
/**
 * Splits a UTF-8 byte stream of concatenated top-level JSON values (objects
 * or arrays) into one {@link Reader} per value. Byte-stream counterpart of
 * ReaderJSONRecordSplitter.
 */
public class UTF8JsonRecordSplitter implements JsonRecordSplitter {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UTF8JsonRecordSplitter.class);

  // Structural characters used to detect the boundary between two records.
  private static final int OPEN_CBRACKET = '{';
  private static final int OPEN_BRACKET = '[';
  private static final int CLOSE_CBRACKET = '}';
  private static final int CLOSE_BRACKET = ']';

  // Whitespace that may appear between two records.
  private static final int SPACE = ' ';
  private static final int TAB = '\t';
  private static final int NEW_LINE = '\n';
  private static final int FORM_FEED = '\f';
  private static final int CR = '\r';

  // Absolute byte offset of the start of the next (unconsumed) record.
  private long start = 0;
  private InputStream incoming;

  public UTF8JsonRecordSplitter(InputStream incoming){
    // Buffering is required: the boundary scan below relies on mark()/reset().
    this.incoming = new BufferedInputStream(incoming);
  }

  /**
   * Scans forward for the boundary between two top-level JSON values, then
   * rewinds and returns a Reader limited to the first of them.
   *
   * <p>Heuristic: after a closing bracket/brace, the next opening
   * bracket/brace (separated only by whitespace) starts a new record.
   * NOTE(review): brackets inside string literals could fool this scan, and
   * records longer than the 128KB mark limit will invalidate reset() —
   * confirm expected input sizes.
   *
   * @return a Reader over the next record, or null when input is exhausted.
   * @throws IOException if the underlying stream fails.
   */
  @Override
  public Reader getNextReader() throws IOException{

    boolean inCandidate = false;  // true right after a closing '}' or ']'
    boolean found = false;

    incoming.mark(128*1024);
    long endOffset = start;
    outside: while(true){
      int b = incoming.read();
      endOffset++;

      if(b == -1){
        // End of stream: a pending candidate means one final record remains.
        if(inCandidate){
          found = true;
        }
        break;
      }

      switch(b){
      case CLOSE_BRACKET:
      case CLOSE_CBRACKET:
        inCandidate = true;
        break;
      case OPEN_BRACKET:
      case OPEN_CBRACKET:
        // An opener right after a closer (modulo whitespace) starts the next
        // record; stop scanning.
        if(inCandidate){
          found = true;
          break outside;
        }
        break;

      case SPACE:
      case TAB:
      case NEW_LINE:
      case CR:
      case FORM_FEED:
        // Whitespace between records keeps the candidate alive.
        break;

      default:
        // Any other byte means we are still inside the current record.
        inCandidate = false;
      }
    }

    if(found){
      // endOffset is one past the byte that terminated the scan; the record
      // itself spans [start, endOffset - 1).
      long maxBytes = endOffset - 1 - start;
      start = endOffset;
      incoming.reset();
      return new BufferedReader(new InputStreamReader(new DelInputStream(incoming, maxBytes), Charsets.UTF_8));
    }else{
      return null;
    }

  }

  /** InputStream view that exposes at most {@code maxBytes} bytes of the delegate. */
  private class DelInputStream extends InputStream {

    private final InputStream incoming;
    private final long maxBytes;
    private long bytes = 0;  // bytes handed out so far

    public DelInputStream(InputStream in, long maxBytes) {
      this.maxBytes = maxBytes;
      this.incoming = in;
    }

    @Override
    public int read() throws IOException {
      if (bytes >= maxBytes){
        // Limit reached: report end-of-stream without touching the delegate.
        return -1;
      }else{
        bytes++;
        return incoming.read();
      }


    }

  }

  /** Manual smoke test: splits two concatenated JSON objects and prints them. */
  public static void main(String[] args) throws Exception{
    byte[] str = " { \"b\": \"hello\", \"c\": \"goodbye\", r: []}\n { \"b\": \"yellow\", \"c\": \"red\"}\n ".getBytes(Charsets.UTF_8);
    InputStream s = new ByteArrayInputStream(str);
    JsonRecordSplitter splitter = new UTF8JsonRecordSplitter(s);
    Reader obj = null;
    System.out.println();

    while( (obj = splitter.getNextReader()) != null){
      System.out.println();
      System.out.println(CharStreams.toString(obj));
      System.out.println("===end obj===");
    }
  }
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseReader.java
new file mode 100644
index 0000000..8f892e7
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseReader.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.impl;
+
+import java.util.Iterator;
+import java.util.Map.Entry;
+
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+
+
+abstract class AbstractBaseReader implements FieldReader{
+
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AbstractBaseReader.class);
+
+ private int index;
+
+ public AbstractBaseReader() {
+ super();
+ }
+
+ public void setPosition(int index){
+ this.index = index;
+ }
+
+ int idx(){
+ return index;
+ }
+
+ @Override
+ public Iterator<String> iterator() {
+ throw new IllegalStateException("The current reader doesn't support reading as a map.");
+ }
+
+ public MajorType getType(){
+ throw new IllegalStateException("The current reader doesn't support getting type information.");
+ }
+
+ @Override
+ public boolean next() {
+ throw new IllegalStateException("The current reader doesn't support getting next information.");
+ }
+
+ @Override
+ public int size() {
+ throw new IllegalStateException("The current reader doesn't support getting size information.");
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseWriter.java
new file mode 100644
index 0000000..7aa9846
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/AbstractBaseWriter.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.impl;
+
+import org.apache.drill.exec.vector.complex.WriteState;
+import org.apache.drill.exec.vector.complex.writer.FieldWriter;
+
+
+abstract class AbstractBaseWriter implements FieldWriter{
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AbstractBaseWriter.class);
+
+ final WriteState state;
+ final FieldWriter parent;
+ private int index;
+
+ public AbstractBaseWriter(FieldWriter parent) {
+ super();
+ this.state = parent == null ? new WriteState() : parent.getState();
+ this.parent = parent;
+ }
+
+ public FieldWriter getParent() {
+ return parent;
+ }
+
+ public boolean ok(){
+ return state.isOk();
+ }
+
+ public boolean isRoot(){
+ return parent == null;
+ }
+
+ int idx(){
+ return index;
+ }
+ protected void resetState(){
+ state.reset();
+ }
+
+ public void setPosition(int index){
+ this.index = index;
+ }
+
+ void inform(boolean outcome){
+ if(!outcome){
+ state.fail(this);
+ }
+ }
+
+ public WriteState getState(){
+ return state;
+ }
+
+ public void end(){
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/ComplexWriterImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/ComplexWriterImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/ComplexWriterImpl.java
new file mode 100644
index 0000000..c6ea75b
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/ComplexWriterImpl.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.impl;
+
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.vector.complex.MapVector;
+import org.apache.drill.exec.vector.complex.StateTool;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;
+
+import com.google.hive12.common.base.Preconditions;
+
+public class ComplexWriterImpl extends AbstractFieldWriter implements ComplexWriter{
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ComplexWriterImpl.class);
+
+ SingleMapWriter mapRoot;
+ SingleListWriter listRoot;
+ MapVector container;
+
+ Mode mode = Mode.INIT;
+ private final String name;
+
+ private enum Mode { INIT, MAP, LIST };
+
+ public ComplexWriterImpl(String name, MapVector container){
+ super(null);
+ this.name = name;
+ this.container = container;
+ }
+
+ private void check(Mode... modes){
+ StateTool.check(mode, modes);
+ }
+
+ public void reset(){
+ setPosition(0);
+ resetState();
+ }
+
+ public void clear(){
+ switch(mode){
+ case MAP:
+ mapRoot.clear();
+ break;
+ case LIST:
+ listRoot.clear();
+ break;
+ }
+ }
+
+ public void setValueCount(int count){
+ switch(mode){
+ case MAP:
+ mapRoot.setValueCount(count);
+ break;
+ case LIST:
+ listRoot.setValueCount(count);
+ break;
+ }
+ }
+
+ public void setPosition(int index){
+ super.setPosition(index);
+ switch(mode){
+ case MAP:
+ mapRoot.setPosition(index);
+ break;
+ case LIST:
+ listRoot.setPosition(index);
+ break;
+ }
+ }
+
+
+ public MapWriter directMap(){
+ Preconditions.checkArgument(name == null);
+
+ switch(mode){
+
+ case INIT:
+ MapVector map = (MapVector) container;
+ mapRoot = new SingleMapWriter(map, this);
+ mapRoot.setPosition(idx());
+ mode = Mode.MAP;
+ break;
+
+ case MAP:
+ break;
+
+ default:
+ check(Mode.INIT, Mode.MAP);
+ }
+
+ return mapRoot;
+ }
+
+ @Override
+ public MapWriter rootAsMap() {
+ switch(mode){
+
+ case INIT:
+ MapVector map = container.addOrGet(name, Types.required(MinorType.MAP), MapVector.class);
+ mapRoot = new SingleMapWriter(map, this);
+ mapRoot.setPosition(idx());
+ mode = Mode.MAP;
+ break;
+
+ case MAP:
+ break;
+
+ default:
+ check(Mode.INIT, Mode.MAP);
+ }
+
+ return mapRoot;
+ }
+
+
+ @Override
+ public void allocate() {
+ if(mapRoot != null){
+ mapRoot.allocate();
+ }else if(listRoot != null){
+ listRoot.allocate();
+ }
+ }
+
+ @Override
+ public ListWriter rootAsList() {
+ switch(mode){
+
+ case INIT:
+ listRoot = new SingleListWriter(name, container, this);
+ listRoot.setPosition(idx());
+ mode = Mode.LIST;
+ break;
+
+ case LIST:
+ break;
+
+ default:
+ check(Mode.INIT, Mode.MAP);
+ }
+
+ return listRoot;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedListReaderImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedListReaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedListReaderImpl.java
new file mode 100644
index 0000000..c555f35
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedListReaderImpl.java
@@ -0,0 +1,113 @@
+
+/*******************************************************************************
+
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.vector.complex.impl;
+
+
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.expr.holders.RepeatedListHolder;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.complex.RepeatedListVector;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+
/**
 * FieldReader over a RepeatedListVector: each record position holds a
 * sequence of list values addressed by [h.start, h.end) offsets into the
 * child vector. Iteration protocol: setPosition(i), then next() before each
 * element.
 */
public class RepeatedListReaderImpl extends AbstractFieldReader{
  // Sentinel meaning "no values at the current position"; chosen one below
  // MAX_VALUE so that currentOffset + 1 in next() cannot overflow.
  private static final int NO_VALUES = Integer.MAX_VALUE - 1;
  private static final MajorType TYPE = Types.repeated(MinorType.LIST);
  private final String name;
  private final RepeatedListVector container;
  private FieldReader reader;  // lazily created child reader

  public RepeatedListReaderImpl(String name, RepeatedListVector container){
    super();
    this.name = name;
    this.container = container;
  }

  public MajorType getType(){
    return TYPE;
  }

  /** Copies the current list value into the given writer; no-op when empty. */
  public void copyAsValue(ListWriter writer){
    if(currentOffset == NO_VALUES) return;
    RepeatedListWriter impl = (RepeatedListWriter) writer;
    // inform() routes a failed copy into the writer's shared WriteState.
    impl.inform(impl.container.copyFromSafe(idx(), impl.idx(), container));
  }

  /** Copies the current list value into field {@code name} of the given map writer. */
  public void copyAsField(String name, MapWriter writer){
    if(currentOffset == NO_VALUES) return;
    RepeatedListWriter impl = (RepeatedListWriter) writer.list(name);
    impl.inform(impl.container.copyFromSafe(idx(), impl.idx(), container));
  }

  // Child-vector offset of the current element (or NO_VALUES), and the
  // exclusive end offset of the current position's range.
  private int currentOffset;
  private int maxOffset;

  // NOTE(review): after an empty position (currentOffset == NO_VALUES) this
  // returns a large negative number — callers appear to gate on next()/size
  // of non-empty positions; confirm.
  public int size(){
    return maxOffset - currentOffset;
  }

  public void setPosition(int index){
    super.setPosition(index);
    RepeatedListHolder h = new RepeatedListHolder();
    container.getAccessor().get(index, h);
    if(h.start == h.end){
      currentOffset = NO_VALUES;
    }else{
      // One before the first element: the first next() advances onto it.
      currentOffset = h.start-1;
      maxOffset = h.end;
      if(reader != null) reader.setPosition(currentOffset);
    }
  }

  /** Advances to the next element; returns false (and parks) when exhausted. */
  public boolean next(){
    if(currentOffset +1 < maxOffset){
      currentOffset++;
      if(reader != null) reader.setPosition(currentOffset);
      return true;
    }else{
      currentOffset = NO_VALUES;
      return false;
    }
  }

  @Override
  public Object readObject() {
    return container.getAccessor().getObject(idx());
  }

  /** Lazily resolves the reader for the child vector, positioned at the current element. */
  public FieldReader reader(){
    if(reader == null){
      reader = container.get(name, ValueVector.class).getAccessor().getReader();
      reader.setPosition(currentOffset);
    }
    return reader;
  }

  public boolean isSet(){
    return true;
  }


}
+
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedMapReaderImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedMapReaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedMapReaderImpl.java
new file mode 100644
index 0000000..ab778ff
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/RepeatedMapReaderImpl.java
@@ -0,0 +1,205 @@
+
+
+/*******************************************************************************
+
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.vector.complex.impl;
+
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.ObjectArrays;
+import com.google.common.base.Charsets;
+import com.google.common.collect.ObjectArrays;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufInputStream;
+
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.proto.SchemaDefProtos;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
+import org.apache.drill.exec.record.*;
+import org.apache.drill.exec.vector.*;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.common.expression.FieldReference;
+import org.apache.drill.common.types.TypeProtos.*;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.vector.complex.*;
+import org.apache.drill.exec.vector.complex.reader.*;
+import org.apache.drill.exec.vector.complex.writer.*;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+
+import com.sun.codemodel.JType;
+import com.sun.codemodel.JCodeModel;
+
+import java.util.Arrays;
+import java.util.Random;
+import java.util.List;
+import java.io.Closeable;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+
+import org.joda.time.DateTime;
+import org.joda.time.Period;
+import org.apache.hadoop.io.Text;
+
+
+
+
+
+
+
+
+
+
+
+
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.exec.vector.complex.MapVector;
+
+import com.google.common.collect.Maps;
+
@SuppressWarnings("unused")
/**
 * FieldReader over a RepeatedMapVector: each record position holds a sequence
 * of map values addressed by [h.start, h.end) offsets into the child vectors.
 * Child readers are cached per field name and repositioned together.
 */
public class RepeatedMapReaderImpl extends AbstractFieldReader{
  // Sentinel meaning "no values at the current position"; one below MAX_VALUE
  // so currentOffset + 1 in next() cannot overflow.
  private static final int NO_VALUES = Integer.MAX_VALUE - 1;

  private final RepeatedMapVector vector;
  // Cache of per-field child readers, keyed by field name.
  private final Map<String, FieldReader> fields = Maps.newHashMap();

  public RepeatedMapReaderImpl(RepeatedMapVector vector) {
    this.vector = vector;
  }

  /** Moves every cached child reader to the given child-vector offset. */
  private void setChildrenPosition(int index){
    for(FieldReader r : fields.values()){
      r.setPosition(index);
    }
  }

  /**
   * Returns (and caches) the reader for the named child field; a NullReader
   * stands in for fields the vector does not contain.
   */
  public FieldReader reader(String name){
    FieldReader reader = fields.get(name);
    if(reader == null){
      ValueVector child = vector.get(name, ValueVector.class);
      if(child == null){
        reader = NullReader.INSTANCE;
      }else{
        reader = child.getAccessor().getReader();
      }
      fields.put(name, reader);
      reader.setPosition(currentOffset);
    }
    return reader;
  }


  // Child-vector offset of the current element (or NO_VALUES), and the
  // exclusive end offset of the current position's range.
  private int currentOffset;
  private int maxOffset;

  // NOTE(review): after an empty position (currentOffset == NO_VALUES) this
  // returns a large negative number — confirm callers gate on next() first.
  public int size(){
    return maxOffset - currentOffset;
  }

  public void setPosition(int index){
    super.setPosition(index);
    RepeatedMapHolder h = new RepeatedMapHolder();
    vector.getAccessor().get(index, h);
    if(h.start == h.end){
      currentOffset = NO_VALUES;
    }else{
      // One before the first element: the first next() advances onto it.
      currentOffset = h.start-1;
      maxOffset = h.end;
      setChildrenPosition(currentOffset);
    }
  }

  /** Positions this reader on exactly one child element of record {@code index}. */
  public void setSinglePosition(int index, int childIndex){
    super.setPosition(index);
    RepeatedMapHolder h = new RepeatedMapHolder();
    vector.getAccessor().get(index, h);
    if(h.start == h.end){
      currentOffset = NO_VALUES;
    }else{
      int singleOffset = h.start + childIndex;
      assert singleOffset < h.end;
      // Window of exactly one element: next() will yield it once.
      currentOffset = singleOffset;
      maxOffset = singleOffset + 1;
      setChildrenPosition(singleOffset);
    }
  }

  /** Advances to the next element; returns false (and parks) when exhausted. */
  public boolean next(){
    if(currentOffset +1 < maxOffset){
      setChildrenPosition(++currentOffset);
      return true;
    }else{
      currentOffset = NO_VALUES;
      return false;
    }
  }

  @Override
  public Object readObject() {
    return vector.getAccessor().getObject(idx());
  }

  public MajorType getType(){
    return vector.getField().getType();
  }

  /** Iterates the names of the map's child fields. */
  public java.util.Iterator<String> iterator(){
    return vector.fieldNameIterator();
  }

  @Override
  public boolean isSet() {
    return false;
  }

  /** Copies the current repeated-map value into the given writer; no-op when empty. */
  public void copyAsValue(MapWriter writer){
    if(currentOffset == NO_VALUES) return;
    RepeatedMapWriter impl = (RepeatedMapWriter) writer;
    // inform() routes a failed copy into the writer's shared WriteState.
    impl.inform(impl.container.copyFromSafe(idx(), impl.idx(), vector));
  }

  /** Copies only the current single element into a (non-repeated) map writer. */
  public void copyAsValueSingle(MapWriter writer){
    if(currentOffset == NO_VALUES) return;
    SingleMapWriter impl = (SingleMapWriter) writer;
    impl.inform(impl.container.copyFromSafe(currentOffset, impl.idx(), vector));
  }

  /** Copies the current value into field {@code name} of the given map writer. */
  public void copyAsField(String name, MapWriter writer){
    if(currentOffset == NO_VALUES) return;
    RepeatedMapWriter impl = (RepeatedMapWriter) writer.map(name);
    impl.inform(impl.container.copyFromSafe(idx(), impl.idx(), vector));
  }


}
+
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleListReaderImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleListReaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleListReaderImpl.java
new file mode 100644
index 0000000..36e04a7
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleListReaderImpl.java
@@ -0,0 +1,92 @@
+
+/*******************************************************************************
+
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.vector.complex.impl;
+
+
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.complex.AbstractContainerVector;
+import org.apache.drill.exec.vector.complex.reader.FieldReader;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+
+
+
+
+
+
+
+
+@SuppressWarnings("unused")
+public class SingleListReaderImpl extends AbstractFieldReader{
+
+ private static final MajorType TYPE = Types.optional(MinorType.LIST);
+ private final String name;
+ private final AbstractContainerVector container;
+ private FieldReader reader;
+
+ public SingleListReaderImpl(String name, AbstractContainerVector container){
+ super();
+ this.name = name;
+ this.container = container;
+ }
+
+ public MajorType getType(){
+ return TYPE;
+ }
+
+
+ public void setPosition(int index){
+ super.setPosition(index);
+ if(reader != null) reader.setPosition(index);
+ }
+
+ @Override
+ public Object readObject() {
+ return reader.readObject();
+ }
+
+ public FieldReader reader(){
+ if(reader == null){
+ reader = container.get(name, ValueVector.class).getAccessor().getReader();
+ setPosition(idx());
+ }
+ return reader;
+ }
+
+ @Override
+ public boolean isSet() {
+ return false;
+ }
+
+ public void copyAsValue(ListWriter writer){
+ throw new UnsupportedOperationException("Generic list copying not yet supported. Please resolve to typed list.");
+ }
+
+ public void copyAsField(String name, MapWriter writer){
+ throw new UnsupportedOperationException("Generic list copying not yet supported. Please resolve to typed list.");
+ }
+
+
+
+}
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java
new file mode 100644
index 0000000..2158fcc
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/SingleMapReaderImpl.java
@@ -0,0 +1,154 @@
+
+
+/*******************************************************************************
+
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ ******************************************************************************/
+package org.apache.drill.exec.vector.complex.impl;
+
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.ObjectArrays;
+import com.google.common.base.Charsets;
+import com.google.common.collect.ObjectArrays;
+
+import io.netty.buffer.ByteBuf;
+import io.netty.buffer.ByteBufInputStream;
+
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.proto.SchemaDefProtos;
+import org.apache.drill.exec.proto.UserBitShared.SerializedField;
+import org.apache.drill.exec.record.*;
+import org.apache.drill.exec.vector.*;
+import org.apache.drill.exec.expr.holders.*;
+import org.apache.drill.common.expression.FieldReference;
+import org.apache.drill.common.types.TypeProtos.*;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.vector.complex.*;
+import org.apache.drill.exec.vector.complex.reader.*;
+import org.apache.drill.exec.vector.complex.writer.*;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+
+import com.sun.codemodel.JType;
+import com.sun.codemodel.JCodeModel;
+
+import java.util.Arrays;
+import java.util.Random;
+import java.util.List;
+import java.io.Closeable;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+
+import org.joda.time.DateTime;
+import org.joda.time.Period;
+import org.apache.hadoop.io.Text;
+
+
+
+
+
+
+
+
+
+
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.exec.vector.complex.MapVector;
+
+import com.google.common.collect.Maps;
+
+/**
+ * FieldReader for a MAP field backed by a single (non-hyper) {@link MapVector}.
+ * Child readers are resolved lazily by name and cached; missing children
+ * resolve to {@code NullReader.INSTANCE}.
+ */
+@SuppressWarnings("unused")
+public class SingleMapReaderImpl extends AbstractFieldReader{
+
+ private final MapVector vector;
+ // Cache of per-child readers, keyed by field name. Populated lazily.
+ private final Map<String, FieldReader> fields = Maps.newHashMap();
+
+ public SingleMapReaderImpl(MapVector vector) {
+ this.vector = vector;
+ }
+
+ // NOTE(review): private helper duplicating the loop in setPosition(int);
+ // appears unused within this class — confirm before removing.
+ private void setChildrenPosition(int index){
+ for(FieldReader r : fields.values()){
+ r.setPosition(index);
+ }
+ }
+
+ /**
+ * Returns a reader for the named child field, creating and caching it on
+ * first use. A child absent from the vector yields NullReader.INSTANCE,
+ * which is also cached. Newly created readers are synced to our position.
+ */
+ public FieldReader reader(String name){
+ FieldReader reader = fields.get(name);
+ if(reader == null){
+ ValueVector child = vector.get(name, ValueVector.class);
+ if(child == null){
+ reader = NullReader.INSTANCE;
+ }else{
+ reader = child.getAccessor().getReader();
+ }
+ fields.put(name, reader);
+ reader.setPosition(idx());
+ }
+ return reader;
+ }
+
+ // Moves this reader and every cached child reader to the given row.
+ public void setPosition(int index){
+ super.setPosition(index);
+ for(FieldReader r : fields.values()){
+ r.setPosition(index);
+ }
+ }
+
+ @Override
+ public Object readObject() {
+ return vector.getAccessor().getObject(idx());
+ }
+
+ @Override
+ public boolean isSet() {
+ // Maps are always considered set (contrast with the list reader, which
+ // hard-codes false).
+ return true;
+ }
+
+ public MajorType getType(){
+ return vector.getField().getType();
+ }
+
+ // Iterates the names of the map's child fields.
+ public java.util.Iterator<String> iterator(){
+ return vector.fieldNameIterator();
+ }
+
+ // Copies this map's current row into the writer's container. Assumes the
+ // writer is a SingleMapWriter; other implementations will CCE.
+ public void copyAsValue(MapWriter writer){
+ SingleMapWriter impl = (SingleMapWriter) writer;
+ impl.inform(impl.container.copyFromSafe(idx(), impl.idx(), vector));
+ }
+
+ public void copyAsField(String name, MapWriter writer){
+ SingleMapWriter impl = (SingleMapWriter) writer.map(name);
+ impl.inform(impl.container.copyFromSafe(idx(), impl.idx(), vector));
+ }
+
+
+}
+
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/VectorContainerWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/VectorContainerWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/VectorContainerWriter.java
new file mode 100644
index 0000000..bc1d367
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/impl/VectorContainerWriter.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.impl;
+
+import org.apache.drill.common.types.TypeProtos.MajorType;
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.physical.impl.OutputMutator;
+import org.apache.drill.exec.record.MaterializedField;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.complex.MapVector;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;
+
+/**
+ * Root-level ComplexWriter whose backing map vector routes every new child
+ * field through an {@link OutputMutator}, so fields created while writing are
+ * registered with the downstream record batch. The root must be a map:
+ * {@link #rootAsList()} is unsupported.
+ *
+ * NOTE(review): the bare MapWriter/ListWriter return types below have no
+ * visible import in this diff — presumably resolved via the enclosing
+ * package or a nested-type import not shown; verify the file compiles.
+ */
+public class VectorContainerWriter extends AbstractFieldWriter implements ComplexWriter {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(VectorContainerWriter.class);
+
+ SingleMapWriter mapRoot;
+ // Map vector that lazily creates children through the mutator (see below).
+ SpecialMapVector mapVector;
+ OutputMutator mutator;
+
+ public VectorContainerWriter(OutputMutator mutator) {
+ super(null);
+ this.mutator = mutator;
+ this.mapVector = new SpecialMapVector();
+ this.mapRoot = new SingleMapWriter(mapVector, this);
+ }
+
+ // Rewinds to row 0 and clears accumulated writer state.
+ public void reset() {
+ setPosition(0);
+ resetState();
+ }
+
+ public void clear() {
+ mapRoot.clear();
+ }
+
+ public SingleMapWriter getWriter() {
+ return mapRoot;
+ }
+
+ public void setValueCount(int count) {
+ mapRoot.setValueCount(count);
+ }
+
+ // Keeps the root map writer in sync with this writer's position.
+ public void setPosition(int index) {
+ super.setPosition(index);
+ mapRoot.setPosition(index);
+ }
+
+ public MapWriter directMap() {
+ return mapRoot;
+ }
+
+ @Override
+ public void allocate() {
+ mapRoot.allocate();
+ }
+
+ /**
+ * MapVector whose addOrGet creates new child vectors via the OutputMutator
+ * (instead of allocating locally), so schema changes are published to the
+ * output batch as they occur.
+ */
+ private class SpecialMapVector extends MapVector {
+
+ public SpecialMapVector() {
+ super("", null);
+ }
+
+ @Override
+ public <T extends ValueVector> T addOrGet(String name, MajorType type, Class<T> clazz) {
+ try {
+ ValueVector v = mutator.addField(MaterializedField.create(name, type), clazz);
+ this.put(name, v);
+ return this.typeify(v, clazz);
+ } catch (SchemaChangeException e) {
+ // Schema changes are unexpected here; surface as a programming error
+ // rather than forcing callers to handle a checked exception.
+ throw new IllegalStateException(e);
+ }
+
+ }
+
+ }
+
+ @Override
+ public MapWriter rootAsMap() {
+ return mapRoot;
+ }
+
+ @Override
+ public ListWriter rootAsList() {
+ throw new UnsupportedOperationException(
+ "Drill doesn't support objects whose first level is a scalar or array. Objects must start as maps.");
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/reader/FieldReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/reader/FieldReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/reader/FieldReader.java
new file mode 100644
index 0000000..caa3aa6
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/reader/FieldReader.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.reader;
+
+import org.apache.drill.exec.vector.complex.reader.BaseReader.ListReader;
+import org.apache.drill.exec.vector.complex.reader.BaseReader.MapReader;
+import org.apache.drill.exec.vector.complex.reader.BaseReader.RepeatedListReader;
+import org.apache.drill.exec.vector.complex.reader.BaseReader.RepeatedMapReader;
+import org.apache.drill.exec.vector.complex.reader.BaseReader.ScalarReader;
+
+
+
+/**
+ * Union of all reader roles: a FieldReader can be treated as a map, list,
+ * scalar, repeated-map, or repeated-list reader, letting callers navigate
+ * complex data without knowing the concrete vector type up front.
+ */
+public interface FieldReader extends MapReader, ListReader, ScalarReader, RepeatedMapReader, RepeatedListReader {
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/writer/FieldWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/writer/FieldWriter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/writer/FieldWriter.java
new file mode 100644
index 0000000..3faa4f7
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/writer/FieldWriter.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.vector.complex.writer;
+
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter;
+import org.apache.drill.exec.vector.complex.writer.BaseWriter.ScalarWriter;
+
+
+
+/**
+ * Union of all writer roles (map, list, scalar) plus the lifecycle hooks
+ * every concrete writer must support: buffer allocation and clearing.
+ */
+public interface FieldWriter extends MapWriter, ListWriter, ScalarWriter {
+ // Allocates backing buffers before writing begins.
+ public void allocate();
+ // Releases/clears writer state and backing buffers.
+ public void clear();
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
index 7c5fc49..1dba073 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/PlanTestBase.java
@@ -210,9 +210,10 @@ public class PlanTestBase extends BaseTestQuery {
loader.load(b.getHeader().getDef(), b.getData());
- VectorWrapper<?> vw = loader.getValueAccessorById(loader
- .getValueVectorId(SchemaPath.getSimplePath(columnName)).getFieldId(),
- NullableVarCharVector.class);
+ VectorWrapper<?> vw = loader.getValueAccessorById(
+ NullableVarCharVector.class, //
+ loader.getValueVectorId(SchemaPath.getSimplePath(columnName)).getFieldIds() //
+ );
System.out.println(vw.getValueVector().getField().toExpr());
ValueVector vv = vw.getValueVector();
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/cdc5daed/exec/java-exec/src/test/java/org/apache/drill/exec/compile/TestEvaluationVisitor.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/compile/TestEvaluationVisitor.java b/exec/java-exec/src/test/java/org/apache/drill/exec/compile/TestEvaluationVisitor.java
new file mode 100644
index 0000000..4d0cbcd
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/compile/TestEvaluationVisitor.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.compile;
+
+import org.antlr.runtime.ANTLRStringStream;
+import org.antlr.runtime.CommonTokenStream;
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.expression.LogicalExpression;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.expression.parser.ExprLexer;
+import org.apache.drill.common.expression.parser.ExprParser;
+import org.apache.drill.common.expression.parser.ExprParser.parse_return;
+import org.apache.drill.common.types.TypeProtos.MinorType;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.expr.CodeGenerator;
+import org.apache.drill.exec.expr.EvaluationVisitor;
+import org.apache.drill.exec.expr.ValueVectorReadExpression;
+import org.apache.drill.exec.expr.ValueVectorWriteExpression;
+import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
+import org.apache.drill.exec.physical.impl.project.Projector;
+import org.apache.drill.exec.record.TypedFieldId;
+import org.junit.Test;
+
+/**
+ * Exercises EvaluationVisitor code generation for a deep, hyper-indexed
+ * field path (a.b[4][2].c[6]): builds a read expression over that path, a
+ * write expression into a repeated map, runs both through the visitor, and
+ * prints the generated source. No assertions — this is a smoke/inspection
+ * test. NOTE(review): the method name "x" is uninformative; consider
+ * renaming (e.g. testDeepPathCodeGeneration).
+ */
+public class TestEvaluationVisitor {
+ static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestEvaluationVisitor.class);
+
+
+ @Test
+ public void x() throws Exception{
+ DrillConfig c = DrillConfig.create();
+
+ FunctionImplementationRegistry reg = new FunctionImplementationRegistry(c);
+ EvaluationVisitor v = new EvaluationVisitor(reg);
+ CodeGenerator<?> g = CodeGenerator.get(Projector.TEMPLATE_DEFINITION, reg);
+ // Parse a deep path with repeated indexes to stress path resolution.
+ SchemaPath path = (SchemaPath) getExpr("a.b[4][2].c[6]");
+
+ // Source field: optional MAP resolving to repeated MAP, hyper-vector,
+ // indexed access, with the unresolved path remainder attached.
+ TypedFieldId id = TypedFieldId.newBuilder() //
+ .addId(1) //
+ .addId(3) //
+ .remainder(path.getRootSegment()) //
+ .intermediateType(Types.optional(MinorType.MAP))
+ .finalType(Types.repeated(MinorType.MAP)) //
+ .hyper() //
+ .withIndex() //
+ .build();
+
+ ValueVectorReadExpression e = new ValueVectorReadExpression(id);
+
+ // Destination field: plain repeated MAP at id 1.
+ TypedFieldId outId = TypedFieldId.newBuilder() //
+ .addId(1) //
+ .finalType(Types.repeated(MinorType.MAP)) //
+ .intermediateType(Types.repeated(MinorType.MAP)) //
+ .build();
+ ValueVectorWriteExpression e2 = new ValueVectorWriteExpression(outId, e, true);
+
+ // Generate and dump the code for manual inspection (no assertions).
+ v.addExpr(e2, g.getRoot());
+ System.out.println(g.generate());
+ }
+
+ // Parses an expression string with the ANTLR-generated lexer/parser and
+ // returns the resulting logical expression.
+ private LogicalExpression getExpr(String expr) throws Exception{
+ ExprLexer lexer = new ExprLexer(new ANTLRStringStream(expr));
+ CommonTokenStream tokens = new CommonTokenStream(lexer);
+
+// tokens.fill();
+// for(Token t : (List<Token>) tokens.getTokens()){
+// System.out.println(t + "" + t.getType());
+// }
+// tokens.rewind();
+
+ ExprParser parser = new ExprParser(tokens);
+ parse_return ret = parser.parse();
+
+ return ret.e;
+
+ }
+}