Posted to commits@asterixdb.apache.org by mb...@apache.org on 2019/02/04 20:59:05 UTC

[asterixdb] 02/03: Merge commit '2dc111d' from 'stabilization-f69489' into 'master'

This is an automated email from the ASF dual-hosted git repository.

mblow pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/asterixdb.git

commit 5ae6b5d6da66e4d047e4549e39fe8c661a392f1e
Merge: 53a5c22 2dc111d
Author: Michael Blow <mb...@apache.org>
AuthorDate: Mon Feb 4 10:17:13 2019 -0500

    Merge commit '2dc111d' from 'stabilization-f69489' into 'master'
    
    Change-Id: I84505e94bdfc916d389288b36612081e778b1e53

 .../org/apache/asterix/translator/ExecutionPlans.java     |  1 +
 .../org/apache/asterix/translator/IStatementExecutor.java |  2 ++
 .../org/apache/asterix/translator/ResultProperties.java   |  1 +
 .../asterix/common/exceptions/ReplicationException.java   |  1 +
 .../asterix/common/messaging/CcIdentifiedMessage.java     |  2 ++
 .../org/apache/asterix/common/utils/NcLocalCounters.java  |  1 +
 .../asterix/lang/sqlpp/parser/SqlppParseException.java    |  1 +
 .../org/apache/asterix/metadata/CachingTxnIdFactory.java  |  1 +
 .../CompactionPolicyTupleTranslator.java                  |  2 ++
 .../DatasourceAdapterTupleTranslator.java                 |  1 +
 .../entitytupletranslators/DatatypeTupleTranslator.java   |  2 ++
 .../entitytupletranslators/DataverseTupleTranslator.java  |  2 ++
 .../ExternalFileTupleTranslator.java                      |  2 ++
 .../FeedConnectionTupleTranslator.java                    |  1 +
 .../entitytupletranslators/FeedPolicyTupleTranslator.java |  2 ++
 .../entitytupletranslators/FeedTupleTranslator.java       |  2 ++
 .../entitytupletranslators/FunctionTupleTranslator.java   |  2 ++
 .../entitytupletranslators/LibraryTupleTranslator.java    |  2 ++
 .../entitytupletranslators/NodeTupleTranslator.java       |  1 +
 .../src/main/java/org/apache/asterix/om/base/AString.java |  1 +
 .../asterix/om/exceptions/IncompatibleTypeException.java  |  1 +
 .../asterix/om/exceptions/InvalidExpressionException.java |  1 +
 .../asterix/om/exceptions/TypeMismatchException.java      |  1 +
 .../om/exceptions/UnsupportedItemTypeException.java       |  1 +
 .../asterix/om/exceptions/UnsupportedTypeException.java   |  1 +
 .../org/apache/asterix/om/functions/FunctionInfo.java     |  1 +
 .../functions/AbstractNumericArithmeticEval.java          |  2 +-
 .../runtime/evaluators/functions/ToObjectDescriptor.java  |  2 ++
 .../runtime/exceptions/IncompatibleTypeException.java     |  1 +
 .../runtime/exceptions/InvalidDataFormatException.java    |  1 +
 .../asterix/runtime/exceptions/OverflowException.java     |  2 ++
 .../asterix/runtime/exceptions/TypeMismatchException.java |  1 +
 .../asterix/runtime/exceptions/UnderflowException.java    |  1 +
 .../runtime/exceptions/UnsupportedItemTypeException.java  |  1 +
 .../runtime/exceptions/UnsupportedTypeException.java      |  1 +
 .../asterix/runtime/functions/FunctionCollection.java     |  1 +
 .../transaction/management/service/logging/LogBuffer.java |  1 +
 .../api/client/HyracksClientInterfaceFunctions.java       |  1 +
 .../apache/hyracks/api/job/resource/ClusterCapacity.java  |  1 +
 .../org/apache/hyracks/api/job/resource/NodeCapacity.java |  1 +
 .../org/apache/hyracks/client/stats/AggregateCounter.java |  1 +
 .../hyracks/control/common/config/ConfigManager.java      |  6 ++++++
 .../hyracks/control/common/config/IConfigSetter.java      |  2 ++
 .../hyracks/control/common/controllers/CCConfig.java      |  1 +
 .../control/common/job/profiling/OperatorStats.java       |  1 +
 .../control/common/job/profiling/StatsCollector.java      |  1 +
 .../control/common/job/profiling/counters/Counter.java    |  2 ++
 .../hyracks/data/std/primitive/DoublePointable.java       |  2 ++
 .../hyracks/data/std/primitive/IntegerPointable.java      |  2 ++
 .../dataflow/common/data/partition/range/RangeMap.java    | 15 ++++++++-------
 .../am/rtree/frames/RTreeNSMInteriorFrameFactory.java     |  2 ++
 .../storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java |  1 +
 52 files changed, 81 insertions(+), 8 deletions(-)

diff --cc hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/RangeMap.java
index 714e3c0,0495cfa..3719144
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/RangeMap.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/RangeMap.java
@@@ -19,79 -19,72 +19,80 @@@
  package org.apache.hyracks.dataflow.common.data.partition.range;
  
  import java.io.Serializable;
 -
 -import org.apache.hyracks.data.std.api.IPointable;
 -import org.apache.hyracks.data.std.primitive.VoidPointable;
 +import java.util.Arrays;
- import java.util.Objects;
  
  /**
 - * The range map stores the field split values in an byte array.
 - * The first split value for each field followed by the second split value for each field, etc.
 + * <pre>
 + * The range map stores the fields split values in a byte array.
 + * The first split value for each field followed by the second split value for each field, etc. For example:
 + *                  split_point_idx0    split_point_idx1    split_point_idx2    split_point_idx3    split_point_idx4
 + * in the byte[]:   f0,f1,f2            f0,f1,f2            f0,f1,f2            f0,f1,f2            f0,f1,f2
 + * numFields would be = 3
 + * we have 5 split points, which gives us 6 partitions:
 + *      p1  |       p2      |       p3      |       p4      |       p5      |       p6
 + *          sp0             sp1             sp2             sp3             sp4
 + * endOffsets.length would be = 15
 + * </pre>
   */
 -public class RangeMap implements IRangeMap, Serializable {
 +public class RangeMap implements Serializable {
-     private final int numFields;
+     private static final long serialVersionUID = -7523433293419648234L;
+ 
+     private final int fields;
      private final byte[] bytes;
 -    private final int[] offsets;
 +    private final int[] endOffsets;
  
 -    public RangeMap(int fields, byte[] bytes, int[] offsets) {
 -        this.fields = fields;
 +    public RangeMap(int numFields, byte[] bytes, int[] endOffsets) {
-         this.numFields = numFields;
++        this.fields = numFields;
          this.bytes = bytes;
 -        this.offsets = offsets;
 -    }
 -
 -    @Override
 -    public IPointable getFieldSplit(int columnIndex, int splitIndex) {
 -        IPointable p = VoidPointable.FACTORY.createPointable();
 -        int index = getFieldIndex(columnIndex, splitIndex);
 -        p.set(bytes, getFieldStart(index), getFieldLength(index));
 -        return p;
 +        this.endOffsets = endOffsets;
      }
  
 -    @Override
      public int getSplitCount() {
-         return endOffsets.length / numFields;
 -        return offsets.length / fields;
++        return endOffsets.length / fields;
      }
  
 -    @Override
 -    public byte[] getByteArray(int columnIndex, int splitIndex) {
 +    public byte[] getByteArray() {
          return bytes;
      }
  
 -    @Override
 -    public int getTag(int columnIndex, int splitIndex) {
 -        return getFieldTag(getFieldIndex(columnIndex, splitIndex));
 +    public int getTag(int fieldIndex, int splitIndex) {
 +        return getSplitValueTag(getSplitValueIndex(fieldIndex, splitIndex));
      }
  
 -    @Override
 -    public int getStartOffset(int columnIndex, int splitIndex) {
 -        return getFieldStart(getFieldIndex(columnIndex, splitIndex));
 +    public int getStartOffset(int fieldIndex, int splitIndex) {
 +        return getSplitValueStart(getSplitValueIndex(fieldIndex, splitIndex));
      }
  
 -    @Override
 -    public int getLength(int columnIndex, int splitIndex) {
 -        return getFieldLength(getFieldIndex(columnIndex, splitIndex));
 +    public int getLength(int fieldIndex, int splitIndex) {
 +        return getSplitValueLength(getSplitValueIndex(fieldIndex, splitIndex));
      }
  
 -    private int getFieldIndex(int columnIndex, int splitIndex) {
 -        return splitIndex * fields + columnIndex;
 +    /** Translates fieldIndex & splitIndex into an index which is used to find information about that split value.
 +     * The combination of a fieldIndex & splitIndex uniquely identifies a split value of interest.
 +     * @param fieldIndex the field index within the splitIndex of interest (0 <= fieldIndex < numFields)
 +     * @param splitIndex starts with 0,1,2,.. etc
 +     * @return the index of the desired split value that could be used with {@code bytes} & {@code endOffsets}.
 +     */
 +    private int getSplitValueIndex(int fieldIndex, int splitIndex) {
-         return splitIndex * numFields + fieldIndex;
++        return splitIndex * fields + fieldIndex;
      }
  
 -    private int getFieldTag(int index) {
 -        return bytes[getFieldStart(index)];
 +    /**
 +     * @param splitValueIndex is the combination of the split index + the field index within that split index
 +     * @return the type tag of a specific field in a specific split point
 +     */
 +    private int getSplitValueTag(int splitValueIndex) {
 +        return bytes[getSplitValueStart(splitValueIndex)];
      }
  
 -    private int getFieldStart(int index) {
 +    /**
 +     * @param splitValueIndex is the combination of the split index + the field index within that split index
 +     * @return the location of a split value in the byte array {@code bytes}
 +     */
 +    private int getSplitValueStart(int splitValueIndex) {
          int start = 0;
 -        if (index != 0) {
 -            start = offsets[index - 1];
 +        if (splitValueIndex != 0) {
 +            start = endOffsets[splitValueIndex - 1];
          }
          return start;
      }
@@@ -108,21 -97,4 +109,21 @@@
          return length;
      }
  
 +    @Override
 +    public int hashCode() {
-         return numFields + Arrays.hashCode(bytes) + Arrays.hashCode(endOffsets);
++        return fields + Arrays.hashCode(bytes) + Arrays.hashCode(endOffsets);
 +    }
 +
 +    @Override
 +    public boolean equals(Object object) {
 +        if (this == object) {
 +            return true;
 +        }
 +        if (!(object instanceof RangeMap)) {
 +            return false;
 +        }
 +        RangeMap other = (RangeMap) object;
-         return numFields == other.numFields && Arrays.equals(endOffsets, other.endOffsets)
++        return fields == other.fields && Arrays.equals(endOffsets, other.endOffsets)
 +                && Arrays.equals(bytes, other.bytes);
 +    }
  }