Posted to commits@asterixdb.apache.org by mb...@apache.org on 2018/01/28 20:52:49 UTC

[08/29] asterixdb git commit: [ASTERIXDB-2256] Reformat sources using code format template

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
index ee5a041..dc66d19 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
@@ -55,8 +55,8 @@ public class FieldHashPartitionComputerFactory implements ITuplePartitionCompute
                     IBinaryHashFunction hashFn = hashFunctions[j];
                     int fStart = accessor.getFieldStartOffset(tIndex, fIdx);
                     int fEnd = accessor.getFieldEndOffset(tIndex, fIdx);
-                    int fh = hashFn
-                            .hash(accessor.getBuffer().array(), startOffset + slotLength + fStart, fEnd - fStart);
+                    int fh = hashFn.hash(accessor.getBuffer().array(), startOffset + slotLength + fStart,
+                            fEnd - fStart);
                     h = h * 31 + fh;
                 }
                 if (h < 0) {

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
index 458171c..4385bd5 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
@@ -30,7 +30,8 @@ public class FieldHashPartitionComputerFamily implements ITuplePartitionComputer
     private final int[] hashFields;
     private final IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories;
 
-    public FieldHashPartitionComputerFamily(int[] hashFields, IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories) {
+    public FieldHashPartitionComputerFamily(int[] hashFields,
+            IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories) {
         this.hashFields = hashFields;
         this.hashFunctionGeneratorFactories = hashFunctionGeneratorFactories;
     }
@@ -52,8 +53,8 @@ public class FieldHashPartitionComputerFamily implements ITuplePartitionComputer
                     IBinaryHashFunction hashFn = hashFunctions[j];
                     int fStart = accessor.getFieldStartOffset(tIndex, fIdx);
                     int fEnd = accessor.getFieldEndOffset(tIndex, fIdx);
-                    int fh = hashFn
-                            .hash(accessor.getBuffer().array(), startOffset + slotLength + fStart, fEnd - fStart);
+                    int fh = hashFn.hash(accessor.getBuffer().array(), startOffset + slotLength + fStart,
+                            fEnd - fStart);
                     h += fh;
                 }
                 if (h < 0) {

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java
index f4da9bf..d58a248 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java
@@ -86,8 +86,8 @@ public class FieldRangePartitionComputerFactory implements ITuplePartitionComput
                     int fIdx = rangeFields[f];
                     int fStart = accessor.getFieldStartOffset(tIndex, fIdx);
                     int fEnd = accessor.getFieldEndOffset(tIndex, fIdx);
-                    c = comparators[f].compare(accessor.getBuffer().array(), startOffset + slotLength + fStart, fEnd
-                            - fStart, rangeMap.getByteArray(fieldIndex, f), rangeMap.getStartOffset(fieldIndex, f),
+                    c = comparators[f].compare(accessor.getBuffer().array(), startOffset + slotLength + fStart,
+                            fEnd - fStart, rangeMap.getByteArray(fieldIndex, f), rangeMap.getStartOffset(fieldIndex, f),
                             rangeMap.getLength(fieldIndex, f));
                     if (c != 0) {
                         return c;

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java
index 81f06da..e99a2ff 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java
@@ -112,7 +112,8 @@ public class SerdeUtils {
         return f.createBinaryComparator();
     }
 
-    public static IBinaryComparatorFactory[] serdesToComparatorFactories(ISerializerDeserializer[] serdes, int numSerdes) {
+    public static IBinaryComparatorFactory[] serdesToComparatorFactories(ISerializerDeserializer[] serdes,
+            int numSerdes) {
         IBinaryComparatorFactory[] comparatorsFactories = new IBinaryComparatorFactory[numSerdes];
         for (int i = 0; i < numSerdes; i++) {
             comparatorsFactories[i] = serdeToComparatorFactory(serdes[i]);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java
index 52bf893..08ed922 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java
@@ -67,7 +67,7 @@ public class TupleUtils {
     }
 
     public static void createIntegerTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple, boolean filtered,
-                                          final int... fields) throws HyracksDataException {
+            final int... fields) throws HyracksDataException {
         DataOutput dos = tupleBuilder.getDataOutput();
         tupleBuilder.reset();
         for (final int i : fields) {
@@ -88,8 +88,8 @@ public class TupleUtils {
 
     public static ITupleReference createIntegerTuple(boolean filtered, final int... fields)
             throws HyracksDataException {
-        ArrayTupleBuilder tupleBuilder = filtered ? new ArrayTupleBuilder(fields.length + 1)
-                : new ArrayTupleBuilder(fields.length);
+        ArrayTupleBuilder tupleBuilder =
+                filtered ? new ArrayTupleBuilder(fields.length + 1) : new ArrayTupleBuilder(fields.length);
         ArrayTupleReference tuple = new ArrayTupleReference();
         createIntegerTuple(tupleBuilder, tuple, fields);
         return tuple;
@@ -122,8 +122,8 @@ public class TupleUtils {
         StringBuilder strBuilder = new StringBuilder();
         int numPrintFields = Math.min(tuple.getFieldCount(), fields.length);
         for (int i = 0; i < numPrintFields; i++) {
-            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i));
+            ByteArrayInputStream inStream =
+                    new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
             DataInput dataIn = new DataInputStream(inStream);
             Object o = fields[i].deserialize(dataIn);
             strBuilder.append(o.toString());
@@ -139,8 +139,8 @@ public class TupleUtils {
         int numFields = Math.min(tuple.getFieldCount(), fields.length);
         Object[] objs = new Object[numFields];
         for (int i = 0; i < numFields; i++) {
-            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i));
+            ByteArrayInputStream inStream =
+                    new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
             DataInput dataIn = new DataInputStream(inStream);
             objs[i] = fields[i].deserialize(dataIn);
         }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java
index 766c77a..c39648c 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java
@@ -155,8 +155,8 @@ public class FrameFixedFieldTupleAppenderTest {
 
     private IFrameTupleAccessor prepareData(DATA_TYPE type) throws HyracksDataException {
         IFrameTupleAccessor accessor = new FrameTupleAccessor(recordDescriptor);
-        IFrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(new FrameManager(INPUT_BUFFER_SIZE)),
-                true);
+        IFrameTupleAppender appender =
+                new FrameTupleAppender(new VSizeFrame(new FrameManager(INPUT_BUFFER_SIZE)), true);
         int i = 0;
         do {
             switch (type) {

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
index e338961..c651292 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
@@ -92,7 +92,7 @@ public abstract class AbstractOperatorDescriptor implements IOperatorDescriptor
     }
 
     @Override
-    public ObjectNode toJSON()  {
+    public ObjectNode toJSON() {
         ObjectMapper om = new ObjectMapper();
         ObjectNode jop = om.createObjectNode();
         jop.put("id", String.valueOf(getOperatorId()));

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
index 93a8120..6ae5b28 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
@@ -22,8 +22,8 @@ import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
-public abstract class AbstractUnaryInputOperatorNodePushable extends AbstractOperatorNodePushable implements
-        IFrameWriter {
+public abstract class AbstractUnaryInputOperatorNodePushable extends AbstractOperatorNodePushable
+        implements IFrameWriter {
     protected IFrameWriter writer;
     protected RecordDescriptor recordDesc;
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java
index 4499e32..47b11ce 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java
@@ -64,7 +64,7 @@ public class DeallocatableFramePool implements IDeallocatableFramePool {
 
     private ByteBuffer mergeExistingFrames(int frameSize) throws HyracksDataException {
         int mergedSize = memBudget - allocated;
-        for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext(); ) {
+        for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext();) {
             ByteBuffer buffer = iter.next();
             iter.remove();
             mergedSize += buffer.capacity();
@@ -88,7 +88,7 @@ public class DeallocatableFramePool implements IDeallocatableFramePool {
     }
 
     private ByteBuffer findExistingFrame(int frameSize) {
-        for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext(); ) {
+        for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext();) {
             ByteBuffer next = iter.next();
             if (next.capacity() >= frameSize) {
                 iter.remove();

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java
index 700500b..6a60813 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java
@@ -59,4 +59,3 @@ public class FrameBufferManager implements IFrameBufferManager {
     }
 
 }
-

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
index 4359b54..27e2671 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
@@ -65,8 +65,8 @@ public class SortMergeFrameReader implements IFrameReader {
             }
             List<IFrameReader> batch = new ArrayList<IFrameReader>(nSenders);
             pbm.getNextBatch(batch, nSenders);
-            merger = new RunMergingFrameReader(ctx, batch, inFrames, sortFields,
-                    comparators, nmkComputer, recordDescriptor);
+            merger = new RunMergingFrameReader(ctx, batch, inFrames, sortFields, comparators, nmkComputer,
+                    recordDescriptor);
         } else {
             // multi level merge.
             throw new HyracksDataException("Not yet supported");

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
index b1cd83e..920fdb8 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
@@ -82,8 +82,8 @@ public class LocalityAwareMToNPartitioningConnectorDescriptor extends AbstractMT
                 expectedPartitions.set(i);
             }
         }
-        NonDeterministicChannelReader channelReader = new NonDeterministicChannelReader(nProducerPartitions,
-                expectedPartitions);
+        NonDeterministicChannelReader channelReader =
+                new NonDeterministicChannelReader(nProducerPartitions, expectedPartitions);
         NonDeterministicFrameReader frameReader = new NonDeterministicFrameReader(channelReader);
         return new PartitionCollector(ctx, getConnectorId(), receiverIndex, expectedPartitions, frameReader,
                 channelReader);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
index d26b9ef..02fbedb 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
@@ -54,8 +54,8 @@ public class MToNPartitioningConnectorDescriptor extends AbstractMToNConnectorDe
             int nProducerPartitions, int nConsumerPartitions) throws HyracksDataException {
         BitSet expectedPartitions = new BitSet(nProducerPartitions);
         expectedPartitions.set(0, nProducerPartitions);
-        NonDeterministicChannelReader channelReader = new NonDeterministicChannelReader(nProducerPartitions,
-                expectedPartitions);
+        NonDeterministicChannelReader channelReader =
+                new NonDeterministicChannelReader(nProducerPartitions, expectedPartitions);
         NonDeterministicFrameReader frameReader = new NonDeterministicFrameReader(channelReader);
         return new PartitionCollector(ctx, getConnectorId(), index, expectedPartitions, frameReader, channelReader);
     }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
index edcad42..026ca5e 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
@@ -69,8 +69,8 @@ public class MToNPartitioningMergingConnectorDescriptor extends AbstractMToNConn
     public IFrameWriter createPartitioner(IHyracksTaskContext ctx, RecordDescriptor recordDesc,
             IPartitionWriterFactory edwFactory, int index, int nProducerPartitions, int nConsumerPartitions)
             throws HyracksDataException {
-        final PartitionDataWriter hashWriter = new PartitionDataWriter(ctx, nConsumerPartitions, edwFactory, recordDesc,
-                tpcf.createPartitioner());
+        final PartitionDataWriter hashWriter =
+                new PartitionDataWriter(ctx, nConsumerPartitions, edwFactory, recordDesc, tpcf.createPartitioner());
         return hashWriter;
     }
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
index eda353b..78428a3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
@@ -57,8 +57,8 @@ public class OneToOneConnectorDescriptor extends AbstractConnectorDescriptor {
             int nProducerPartitions, int nConsumerPartitions) throws HyracksDataException {
         BitSet expectedPartitions = new BitSet(nProducerPartitions);
         expectedPartitions.set(index);
-        NonDeterministicChannelReader channelReader = new NonDeterministicChannelReader(nProducerPartitions,
-                expectedPartitions);
+        NonDeterministicChannelReader channelReader =
+                new NonDeterministicChannelReader(nProducerPartitions, expectedPartitions);
         NonDeterministicFrameReader frameReader = new NonDeterministicFrameReader(channelReader);
         return new PartitionCollector(ctx, getConnectorId(), index, expectedPartitions, frameReader, channelReader);
     }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java
index 97d5f2b..e1df709 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java
@@ -30,7 +30,7 @@ public class PartitionWithMessageDataWriter extends PartitionDataWriter {
 
     public PartitionWithMessageDataWriter(IHyracksTaskContext ctx, int consumerPartitionCount,
             IPartitionWriterFactory pwFactory, RecordDescriptor recordDescriptor, ITuplePartitionComputer tpc)
-                    throws HyracksDataException {
+            throws HyracksDataException {
         super(ctx, consumerPartitionCount, pwFactory, recordDescriptor, tpc);
         // since the message partition writer sends broadcast messages, we allocate frames when we create the writer
         for (int i = 0; i < consumerPartitionCount; ++i) {

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
index 543ad40..18dc9c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
@@ -95,7 +95,7 @@ public abstract class AbstractFileWriteOperatorDescriptor extends AbstractSingle
     @Override
     public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
             IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new DeserializedOperatorNodePushable(ctx, new FileWriteOperator(ctx.getIoManager(),
-                partition), recordDescProvider.getInputRecordDescriptor(getActivityId(), 0));
+        return new DeserializedOperatorNodePushable(ctx, new FileWriteOperator(ctx.getIoManager(), partition),
+                recordDescProvider.getInputRecordDescriptor(getActivityId(), 0));
     }
 }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
index d121ec4..175bdae 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
@@ -67,8 +67,8 @@ public class DelimitedDataTupleParserFactory implements ITupleParserFactory {
                     ArrayTupleBuilder tb = new ArrayTupleBuilder(valueParsers.length);
                     DataOutput dos = tb.getDataOutput();
 
-                    FieldCursorForDelimitedDataParser cursor = new FieldCursorForDelimitedDataParser(
-                            new InputStreamReader(in), fieldDelimiter, quote);
+                    FieldCursorForDelimitedDataParser cursor =
+                            new FieldCursorForDelimitedDataParser(new InputStreamReader(in), fieldDelimiter, quote);
                     while (cursor.nextRecord()) {
                         tb.reset();
                         for (int i = 0; i < valueParsers.length; ++i) {

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
index 40f02f9..3232527 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
@@ -84,17 +84,18 @@ public class HashSpillableTableFactory implements ISpillableTableFactory {
             intermediateResultKeys[i] = i;
         }
 
-        final FrameTuplePairComparator ftpcInputCompareToAggregate = new FrameTuplePairComparator(keyFields,
-                intermediateResultKeys, comparators);
+        final FrameTuplePairComparator ftpcInputCompareToAggregate =
+                new FrameTuplePairComparator(keyFields, intermediateResultKeys, comparators);
 
-        final ITuplePartitionComputer tpc = new FieldHashPartitionComputerFamily(keyFields, hashFunctionFamilies)
-                .createPartitioner(seed);
+        final ITuplePartitionComputer tpc =
+                new FieldHashPartitionComputerFamily(keyFields, hashFunctionFamilies).createPartitioner(seed);
 
         // For calculating hash value for the already aggregated tuples (not incoming tuples)
         // This computer is required to calculate the hash value of a aggregated tuple
         // while doing the garbage collection work on Hash Table.
-        final ITuplePartitionComputer tpcIntermediate = new FieldHashPartitionComputerFamily(intermediateResultKeys,
-                hashFunctionFamilies).createPartitioner(seed);
+        final ITuplePartitionComputer tpcIntermediate =
+                new FieldHashPartitionComputerFamily(intermediateResultKeys, hashFunctionFamilies)
+                        .createPartitioner(seed);
 
         final IAggregatorDescriptor aggregator = aggregateFactory.createAggregator(ctx, inRecordDescriptor,
                 outRecordDescriptor, keyFields, intermediateResultKeys, null, -1);
@@ -110,9 +111,8 @@ public class HashSpillableTableFactory implements ISpillableTableFactory {
         final int numPartitions = getNumOfPartitions(inputDataBytesSize / ctx.getInitialFrameSize(), memoryBudget);
         final int entriesPerPartition = (int) Math.ceil(1.0 * tableSize / numPartitions);
         if (LOGGER.isDebugEnabled()) {
-            LOGGER.debug(
-                    "created hashtable, table size:" + tableSize + " file size:" + inputDataBytesSize + "  #partitions:"
-                    + numPartitions);
+            LOGGER.debug("created hashtable, table size:" + tableSize + " file size:" + inputDataBytesSize
+                    + "  #partitions:" + numPartitions);
         }
 
         final ArrayTupleBuilder outputTupleBuilder = new ArrayTupleBuilder(outRecordDescriptor.getFields().length);
@@ -122,14 +122,14 @@ public class HashSpillableTableFactory implements ISpillableTableFactory {
             private final TuplePointer pointer = new TuplePointer();
             private final BitSet spilledSet = new BitSet(numPartitions);
             // This frame pool will be shared by both data table and hash table.
-            private final IDeallocatableFramePool framePool = new DeallocatableFramePool(ctx,
-                    framesLimit * ctx.getInitialFrameSize());
+            private final IDeallocatableFramePool framePool =
+                    new DeallocatableFramePool(ctx, framesLimit * ctx.getInitialFrameSize());
             // buffer manager for hash table
-            private final ISimpleFrameBufferManager bufferManagerForHashTable = new FramePoolBackedFrameBufferManager(
-                    framePool);
+            private final ISimpleFrameBufferManager bufferManagerForHashTable =
+                    new FramePoolBackedFrameBufferManager(framePool);
 
-            private final ISerializableTable hashTableForTuplePointer = new SerializableHashTable(tableSize, ctx,
-                    bufferManagerForHashTable);
+            private final ISerializableTable hashTableForTuplePointer =
+                    new SerializableHashTable(tableSize, ctx, bufferManagerForHashTable);
 
             // buffer manager for data table
             final IPartitionedTupleBufferManager bufferManager = new VPartitionTupleBufferManager(
@@ -138,8 +138,8 @@ public class HashSpillableTableFactory implements ISpillableTableFactory {
 
             final ITuplePointerAccessor bufferAccessor = bufferManager.getTuplePointerAccessor(outRecordDescriptor);
 
-            private final PreferToSpillFullyOccupiedFramePolicy spillPolicy = new PreferToSpillFullyOccupiedFramePolicy(
-                    bufferManager, spilledSet);
+            private final PreferToSpillFullyOccupiedFramePolicy spillPolicy =
+                    new PreferToSpillFullyOccupiedFramePolicy(bufferManager, spilledSet);
 
             private final FrameTupleAppender outputAppender = new FrameTupleAppender(new VSizeFrame(ctx));
 
@@ -157,8 +157,8 @@ public class HashSpillableTableFactory implements ISpillableTableFactory {
 
                 // Checks whether the garbage collection is required and conducts a garbage collection if so.
                 if (hashTableForTuplePointer.isGarbageCollectionNeeded()) {
-                    int numberOfFramesReclaimed = hashTableForTuplePointer.collectGarbage(bufferAccessor,
-                            tpcIntermediate);
+                    int numberOfFramesReclaimed =
+                            hashTableForTuplePointer.collectGarbage(bufferAccessor, tpcIntermediate);
                     if (LOGGER.isDebugEnabled()) {
                         LOGGER.debug("Garbage Collection on Hash table is done. Deallocated frames:"
                                 + numberOfFramesReclaimed);
@@ -310,8 +310,8 @@ public class HashSpillableTableFactory implements ISpillableTableFactory {
             // partition again and again.
             return 2;
         }
-        long numberOfPartitions = (long) (Math
-                .ceil((nubmerOfInputFrames * FUDGE_FACTOR - frameLimit) / (frameLimit - 1)));
+        long numberOfPartitions =
+                (long) (Math.ceil((nubmerOfInputFrames * FUDGE_FACTOR - frameLimit) / (frameLimit - 1)));
         numberOfPartitions = Math.max(2, numberOfPartitions);
         if (numberOfPartitions > frameLimit) {
             numberOfPartitions = (long) Math.ceil(Math.sqrt(nubmerOfInputFrames * FUDGE_FACTOR));

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
index 7acd687..cb32c4a 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
@@ -113,7 +113,8 @@ public class AvgFieldGroupAggregatorFactory implements IFieldAggregateDescriptor
                 int count = 0;
                 int tupleOffset = accessor.getTupleStartOffset(tIndex);
                 int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
-                sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+                sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+                        tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
                 count += 1;
                 if (!useObjectState) {
                     try {
@@ -139,7 +140,8 @@ public class AvgFieldGroupAggregatorFactory implements IFieldAggregateDescriptor
                 int sum = 0, count = 0;
                 int tupleOffset = accessor.getTupleStartOffset(tIndex);
                 int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
-                sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+                sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+                        tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
                 count += 1;
                 if (!useObjectState) {
                     ByteBuffer buf = ByteBuffer.wrap(data);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
index b77c91c..290cc58 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
@@ -118,7 +118,8 @@ public class AvgFieldMergeAggregatorFactory implements IFieldAggregateDescriptor
                 int sum = 0, count = 0;
                 int tupleOffset = accessor.getTupleStartOffset(tIndex);
                 int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
-                sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+                sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+                        tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
                 count += 1;
                 if (!useObjectState) {
                     ByteBuffer buf = ByteBuffer.wrap(data);
@@ -156,8 +157,10 @@ public class AvgFieldMergeAggregatorFactory implements IFieldAggregateDescriptor
                 int count = 0;
                 int tupleOffset = accessor.getTupleStartOffset(tIndex);
                 int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
-                sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
-                count += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart + 4);
+                sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+                        tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+                count += IntegerPointable.getInteger(accessor.getBuffer().array(),
+                        tupleOffset + accessor.getFieldSlotsLength() + fieldStart + 4);
                 if (!useObjectState) {
                     try {
                         fieldOutput.writeInt(sum);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
index 771303f..fc8d956 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
@@ -35,8 +35,7 @@ import org.apache.hyracks.dataflow.std.group.IFieldAggregateDescriptorFactory;
 /**
  *
  */
-public class FloatSumFieldAggregatorFactory implements
-        IFieldAggregateDescriptorFactory {
+public class FloatSumFieldAggregatorFactory implements IFieldAggregateDescriptorFactory {
 
     private static final long serialVersionUID = 1L;
 
@@ -44,7 +43,7 @@ public class FloatSumFieldAggregatorFactory implements
 
     private final boolean useObjectState;
 
-    public FloatSumFieldAggregatorFactory(int aggField, boolean useObjState){
+    public FloatSumFieldAggregatorFactory(int aggField, boolean useObjState) {
         this.aggField = aggField;
         this.useObjectState = useObjState;
     }
@@ -53,8 +52,7 @@ public class FloatSumFieldAggregatorFactory implements
      * @see org.apache.hyracks.dataflow.std.group.IFieldAggregateDescriptorFactory#createAggregator(org.apache.hyracks.api.context.IHyracksTaskContext, org.apache.hyracks.api.dataflow.value.RecordDescriptor, org.apache.hyracks.api.dataflow.value.RecordDescriptor)
      */
     @Override
-    public IFieldAggregateDescriptor createAggregator(IHyracksTaskContext ctx,
-            RecordDescriptor inRecordDescriptor,
+    public IFieldAggregateDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor,
             RecordDescriptor outRecordDescriptor) throws HyracksDataException {
         return new IFieldAggregateDescriptor() {
 
@@ -64,8 +62,8 @@ public class FloatSumFieldAggregatorFactory implements
             }
 
             @Override
-            public void outputPartialResult(DataOutput fieldOutput, byte[] data,
-                    int offset, AggregateState state) throws HyracksDataException {
+            public void outputPartialResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state)
+                    throws HyracksDataException {
                 float sum;
                 if (!useObjectState) {
                     sum = FloatPointable.getFloat(data, offset);
@@ -80,8 +78,8 @@ public class FloatSumFieldAggregatorFactory implements
             }
 
             @Override
-            public void outputFinalResult(DataOutput fieldOutput, byte[] data,
-                    int offset, AggregateState state) throws HyracksDataException {
+            public void outputFinalResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state)
+                    throws HyracksDataException {
                 float sum;
                 if (!useObjectState) {
                     sum = FloatPointable.getFloat(data, offset);
@@ -106,14 +104,14 @@ public class FloatSumFieldAggregatorFactory implements
             }
 
             @Override
-            public void init(IFrameTupleAccessor accessor, int tIndex,
-                    DataOutput fieldOutput, AggregateState state)
+            public void init(IFrameTupleAccessor accessor, int tIndex, DataOutput fieldOutput, AggregateState state)
                     throws HyracksDataException {
                 float sum = 0;
                 int tupleOffset = accessor.getTupleStartOffset(tIndex);
                 int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
 
-                sum += FloatPointable.getFloat(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+                sum += FloatPointable.getFloat(accessor.getBuffer().array(),
+                        tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
 
                 if (!useObjectState) {
                     try {
@@ -138,13 +136,13 @@ public class FloatSumFieldAggregatorFactory implements
             }
 
             @Override
-            public void aggregate(IFrameTupleAccessor accessor, int tIndex,
-                    byte[] data, int offset, AggregateState state)
-                    throws HyracksDataException {
+            public void aggregate(IFrameTupleAccessor accessor, int tIndex, byte[] data, int offset,
+                    AggregateState state) throws HyracksDataException {
                 float sum = 0;
                 int tupleOffset = accessor.getTupleStartOffset(tIndex);
                 int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
-                sum += FloatPointable.getFloat(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+                sum += FloatPointable.getFloat(accessor.getBuffer().array(),
+                        tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
 
                 if (!useObjectState) {
                     ByteBuffer buf = ByteBuffer.wrap(data);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
index 511b651..90e1474 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
@@ -107,7 +107,8 @@ public class IntSumFieldAggregatorFactory implements IFieldAggregateDescriptorFa
                 int tupleOffset = accessor.getTupleStartOffset(tIndex);
                 int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
 
-                sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+                sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+                        tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
 
                 if (!useObjectState) {
                     try {
@@ -143,7 +144,8 @@ public class IntSumFieldAggregatorFactory implements IFieldAggregateDescriptorFa
                 int sum = 0;
                 int tupleOffset = accessor.getTupleStartOffset(tIndex);
                 int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
-                sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+                sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+                        tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
 
                 if (!useObjectState) {
                     ByteBuffer buf = ByteBuffer.wrap(data);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
index 6900918..d43d4fd 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
@@ -113,9 +113,9 @@ public class MinMaxStringFieldAggregatorFactory implements IFieldAggregateDescri
                 int tupleOffset = accessor.getTupleStartOffset(tIndex);
                 int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
                 int fieldLength = accessor.getFieldLength(tIndex, aggField);
-                String strField = utf8SerializerDeserializer.deserialize(new DataInputStream(
-                        new ByteArrayInputStream(accessor.getBuffer().array(), tupleOffset
-                                + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
+                String strField = utf8SerializerDeserializer
+                        .deserialize(new DataInputStream(new ByteArrayInputStream(accessor.getBuffer().array(),
+                                tupleOffset + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
                 if (hasBinaryState) {
                     // Object-binary-state
                     Object[] storedState;
@@ -158,9 +158,9 @@ public class MinMaxStringFieldAggregatorFactory implements IFieldAggregateDescri
                 int tupleOffset = accessor.getTupleStartOffset(tIndex);
                 int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
                 int fieldLength = accessor.getFieldLength(tIndex, aggField);
-                String strField = utf8SerializerDeserializer.deserialize(new DataInputStream(
-                        new ByteArrayInputStream(accessor.getBuffer().array(), tupleOffset
-                                + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
+                String strField = utf8SerializerDeserializer
+                        .deserialize(new DataInputStream(new ByteArrayInputStream(accessor.getBuffer().array(),
+                                tupleOffset + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
                 if (hasBinaryState) {
                     int stateIdx = IntegerPointable.getInteger(data, offset);
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
index 595e2c4..d2aa35f 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
@@ -95,8 +95,8 @@ public class MultiFieldsAggregatorFactory extends AbstractAccumulatingAggregator
             }
 
             @Override
-            public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor, int tIndex,
-                    AggregateState state) throws HyracksDataException {
+            public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor,
+                    int tIndex, AggregateState state) throws HyracksDataException {
                 DataOutput dos = tupleBuilder.getDataOutput();
 
                 int tupleOffset = stateAccessor.getTupleStartOffset(tIndex);
@@ -151,8 +151,8 @@ public class MultiFieldsAggregatorFactory extends AbstractAccumulatingAggregator
                     int fieldIndex = 0;
                     for (int i = 0; i < aggregators.length; i++) {
                         if (aggregators[i].needsBinaryState()) {
-                            int stateFieldOffset = stateAccessor.getFieldStartOffset(stateTupleIndex, keys.length
-                                    + fieldIndex);
+                            int stateFieldOffset =
+                                    stateAccessor.getFieldStartOffset(stateTupleIndex, keys.length + fieldIndex);
                             aggregators[i].aggregate(accessor, tIndex, stateAccessor.getBuffer().array(),
                                     stateTupleOffset + stateAccessor.getFieldSlotsLength() + stateFieldOffset,
                                     ((AggregateState[]) state.state)[i]);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
index 7e6e147..43f57af 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
@@ -71,8 +71,8 @@ public class ExternalGroupBuildOperatorNodePushable extends AbstractUnaryInputSi
         for (int i = 0; i < comparatorFactories.length; ++i) {
             comparators[i] = comparatorFactories[i].createBinaryComparator();
         }
-        this.firstNormalizerComputer = firstNormalizerFactory == null ? null
-                : firstNormalizerFactory.createNormalizedKeyComputer();
+        this.firstNormalizerComputer =
+                firstNormalizerFactory == null ? null : firstNormalizerFactory.createNormalizedKeyComputer();
         this.spillableTableFactory = spillableTableFactory;
         this.inRecordDescriptor = inRecordDescriptor;
         this.outRecordDescriptor = outRecordDescriptor;
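
Many of the hunks in this section apply the same rule from the new format template: when an assignment no longer fits on one line, the break is placed immediately after the '=' sign and the whole right-hand side moves to a single continuation line. The snippet below is only an editor's illustration of that wrapping style with made-up names; it is not taken from the repository.

    // Illustration only: hypothetical names, not code from this commit.
    public class AssignmentWrapExample {
        interface NormalizedKeyComputerFactory {
            Runnable createNormalizedKeyComputer();
        }

        private final Runnable firstNormalizerComputer;

        AssignmentWrapExample(NormalizedKeyComputerFactory firstNormalizerFactory) {
            // Old wrap: the break fell before the ':' branch of the ternary expression.
            // New wrap: break right after '=', keep the whole expression on one continuation line.
            this.firstNormalizerComputer =
                    firstNormalizerFactory == null ? null : firstNormalizerFactory.createNormalizedKeyComputer();
        }
    }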

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
index 23dee02..6dea186 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
@@ -124,7 +124,7 @@ public class ExternalGroupOperatorDescriptor extends AbstractOperatorDescriptor
         @Override
         public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
                 final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
-                        throws HyracksDataException {
+                throws HyracksDataException {
             return new ExternalGroupBuildOperatorNodePushable(ctx, new TaskId(getActivityId(), partition), tableSize,
                     fileSize, keyFields, framesLimit, comparatorFactories, firstNormalizerFactory,
                     partialAggregatorFactory, recordDescProvider.getInputRecordDescriptor(getActivityId(), 0),
@@ -142,7 +142,7 @@ public class ExternalGroupOperatorDescriptor extends AbstractOperatorDescriptor
         @Override
         public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
                 IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
-                        throws HyracksDataException {
+                throws HyracksDataException {
             return new ExternalGroupWriteOperatorNodePushable(ctx,
                     new TaskId(new ActivityId(getOperatorId(), AGGREGATE_ACTIVITY_ID), partition),
                     spillableTableFactory, partialRecDesc, outRecDesc, framesLimit, keyFields, firstNormalizerFactory,
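
The two hunks above show the template's other indentation change: a wrapped 'throws' clause now sits at the standard continuation indent instead of receiving an extra level of indentation. A small hypothetical sketch of the resulting shape (method and exception names are made up):

    // Illustration only: hypothetical method and exception, not code from this commit.
    public class ThrowsClauseWrapExample {
        public static class DataException extends Exception {
        }

        // The wrapped 'throws' clause is indented by one continuation level (8 spaces),
        // matching the wrapped parameters rather than being pushed further right.
        public Object createPushRuntime(final Object taskContext, final Object recordDescriptorProvider,
                final int partitionIndex, final int numberOfPartitions, final Object extraRuntimeConfiguration)
                throws DataException {
            return new Object();
        }
    }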

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
index fb88775..95994f3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
@@ -130,8 +130,8 @@ public class ExternalGroupWriteOperatorNodePushable extends AbstractUnaryOutputS
                         runs[i].getFileSize(), mergeGroupFields, groupByComparators, nmkComputer,
                         mergeAggregatorFactory, partialAggRecordDesc, outRecordDesc, frameLimit, level);
                 RunFileWriter[] runFileWriters = new RunFileWriter[partitionTable.getNumPartitions()];
-                int[] sizeInTuplesNextLevel = buildGroup(runs[i].createDeleteOnCloseReader(), partitionTable,
-                        runFileWriters);
+                int[] sizeInTuplesNextLevel =
+                        buildGroup(runs[i].createDeleteOnCloseReader(), partitionTable, runFileWriters);
                 for (int idFile = 0; idFile < runFileWriters.length; idFile++) {
                     if (runFileWriters[idFile] != null) {
                         generatedRuns.add(runFileWriters[idFile]);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java
index d29e9ab..8e7777f 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java
@@ -63,8 +63,7 @@ public class ExternalHashGroupBy {
         }
     }
 
-    private void flushPartitionToRun(int partition, RunFileWriter writer)
-            throws HyracksDataException {
+    private void flushPartitionToRun(int partition, RunFileWriter writer) throws HyracksDataException {
         try {
             spilledNumTuples[partition] += table.flushFrames(partition, writer, AggregateType.PARTIAL);
             table.clear(partition);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
index db6102e..ca78046 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
@@ -82,9 +82,8 @@ public class PreclusteredGroupWriter implements IFrameWriter {
 
         // Deducts input/output frames.
         this.memoryLimit = framesLimit <= 0 ? -1 : ((long) (framesLimit - 2)) * ctx.getInitialFrameSize();
-        this.aggregator =
-                aggregatorFactory.createAggregator(ctx, inRecordDesc, outRecordDesc, groupFields, groupFields, writer,
-                        this.memoryLimit);
+        this.aggregator = aggregatorFactory.createAggregator(ctx, inRecordDesc, outRecordDesc, groupFields, groupFields,
+                writer, this.memoryLimit);
         this.aggregateState = aggregator.createAggregateStates();
         copyFrame = new VSizeFrame(ctx);
         inFrameAccessor = new FrameTupleAccessor(inRecordDesc);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java
index 10cc954..ec652eb 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java
@@ -197,11 +197,10 @@ public class IntersectOperatorDescriptor extends AbstractOperatorDescriptor {
             this.allProjectFields = projectedFields;
             this.firstKeyNormalizerComputer =
                     firstKeyNormalizerFactory != null ? firstKeyNormalizerFactory.createNormalizedKeyComputer() : null;
-            this.normalizedKeyDecisive =
-                    firstKeyNormalizerFactory != null
-                            ? firstKeyNormalizerFactory.getNormalizedKeyProperties().isDecisive()
-                                    && compareFields[0].length == 1
-                            : false;
+            this.normalizedKeyDecisive = firstKeyNormalizerFactory != null
+                    ? firstKeyNormalizerFactory.getNormalizedKeyProperties().isDecisive()
+                            && compareFields[0].length == 1
+                    : false;
             comparators = new IBinaryComparator[compareFields[0].length];
             for (int i = 0; i < comparators.length; i++) {
                 comparators[i] = comparatorFactory[i].createBinaryComparator();

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
index 665bb2b..bee0590 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
@@ -176,22 +176,22 @@ public class HybridHashJoinOperatorDescriptor extends AbstractOperatorDescriptor
             for (int i = 0; i < comparatorFactories.length; ++i) {
                 comparators[i] = comparatorFactories[i].createBinaryComparator();
             }
-            final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length]
-                    : null;
+            final IMissingWriter[] nullWriters1 =
+                    isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length] : null;
             if (isLeftOuter) {
                 for (int i = 0; i < nonMatchWriterFactories1.length; i++) {
                     nullWriters1[i] = nonMatchWriterFactories1[i].createMissingWriter();
                 }
             }
-            final IPredicateEvaluator predEvaluator = (predEvaluatorFactory == null ? null
-                    : predEvaluatorFactory.createPredicateEvaluator());
+            final IPredicateEvaluator predEvaluator =
+                    (predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator());
 
             IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
                 private BuildAndPartitionTaskState state = new BuildAndPartitionTaskState(
                         ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
                 private final FrameTupleAccessor accessorBuild = new FrameTupleAccessor(rd1);
-                private final ITuplePartitionComputer hpcBuild = new FieldHashPartitionComputerFactory(keys1,
-                        hashFunctionFactories).createPartitioner();
+                private final ITuplePartitionComputer hpcBuild =
+                        new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories).createPartitioner();
                 private final FrameTupleAppender appender = new FrameTupleAppender();
                 private final FrameTupleAppender ftappender = new FrameTupleAppender();
                 private IFrame[] bufferForPartitions;
@@ -285,8 +285,8 @@ public class HybridHashJoinOperatorDescriptor extends AbstractOperatorDescriptor
                         if (memsize > inputsize0) {
                             state.nPartitions = 0;
                         } else {
-                            state.nPartitions = (int) (Math
-                                    .ceil((inputsize0 * factor / nPartitions - memsize) / (memsize - 1)));
+                            state.nPartitions =
+                                    (int) (Math.ceil((inputsize0 * factor / nPartitions - memsize) / (memsize - 1)));
                         }
                         if (state.nPartitions <= 0) {
                             // becomes in-memory HJ
@@ -303,10 +303,10 @@ public class HybridHashJoinOperatorDescriptor extends AbstractOperatorDescriptor
                         throw new HyracksDataException("not enough memory");
                     }
 
-                    ITuplePartitionComputer hpc0 = new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories)
-                            .createPartitioner();
-                    ITuplePartitionComputer hpc1 = new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories)
-                            .createPartitioner();
+                    ITuplePartitionComputer hpc0 =
+                            new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories).createPartitioner();
+                    ITuplePartitionComputer hpc1 =
+                            new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories).createPartitioner();
                     int tableSize = (int) (state.memoryForHashtable * recordsPerFrame * factor);
                     ISerializableTable table = new SimpleSerializableHashTable(tableSize, ctx);
                     state.joiner =
@@ -369,23 +369,23 @@ public class HybridHashJoinOperatorDescriptor extends AbstractOperatorDescriptor
             for (int i = 0; i < comparatorFactories.length; ++i) {
                 comparators[i] = comparatorFactories[i].createBinaryComparator();
             }
-            final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length]
-                    : null;
+            final IMissingWriter[] nullWriters1 =
+                    isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length] : null;
             if (isLeftOuter) {
                 for (int i = 0; i < nonMatchWriterFactories1.length; i++) {
                     nullWriters1[i] = nonMatchWriterFactories1[i].createMissingWriter();
                 }
             }
-            final IPredicateEvaluator predEvaluator = (predEvaluatorFactory == null ? null
-                    : predEvaluatorFactory.createPredicateEvaluator());
+            final IPredicateEvaluator predEvaluator =
+                    (predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator());
 
             IOperatorNodePushable op = new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
                 private BuildAndPartitionTaskState state;
                 private final FrameTupleAccessor accessorProbe = new FrameTupleAccessor(rd0);
-                private final ITuplePartitionComputerFactory hpcf0 = new FieldHashPartitionComputerFactory(keys0,
-                        hashFunctionFactories);
-                private final ITuplePartitionComputerFactory hpcf1 = new FieldHashPartitionComputerFactory(keys1,
-                        hashFunctionFactories);
+                private final ITuplePartitionComputerFactory hpcf0 =
+                        new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories);
+                private final ITuplePartitionComputerFactory hpcf1 =
+                        new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories);
                 private final ITuplePartitionComputer hpcProbe = hpcf0.createPartitioner();
 
                 private final FrameTupleAppender appender = new FrameTupleAppender();
@@ -476,10 +476,10 @@ public class HybridHashJoinOperatorDescriptor extends AbstractOperatorDescriptor
                         } finally {
                             state.joiner.releaseMemory();
                         }
-                        ITuplePartitionComputer hpcRep0 = new RepartitionComputerFactory(state.nPartitions, hpcf0)
-                                .createPartitioner();
-                        ITuplePartitionComputer hpcRep1 = new RepartitionComputerFactory(state.nPartitions, hpcf1)
-                                .createPartitioner();
+                        ITuplePartitionComputer hpcRep0 =
+                                new RepartitionComputerFactory(state.nPartitions, hpcf0).createPartitioner();
+                        ITuplePartitionComputer hpcRep1 =
+                                new RepartitionComputerFactory(state.nPartitions, hpcf1).createPartitioner();
                         if (state.memoryForHashtable != memsize - 2) {
                             for (int i = 0; i < state.nPartitions; i++) {
                                 ByteBuffer buf = bufferForPartitions[i].getBuffer();
@@ -505,11 +505,10 @@ public class HybridHashJoinOperatorDescriptor extends AbstractOperatorDescriptor
                                     continue;
                                 }
                                 table.reset();
-                                InMemoryHashJoin joiner =
-                                        new InMemoryHashJoin(ctx, new FrameTupleAccessor(rd0), hpcRep0,
-                                                new FrameTupleAccessor(rd1), rd1, hpcRep1,
-                                                new FrameTuplePairComparator(keys0, keys1, comparators), isLeftOuter,
-                                                nullWriters1, table, predEvaluator, null);
+                                InMemoryHashJoin joiner = new InMemoryHashJoin(ctx, new FrameTupleAccessor(rd0),
+                                        hpcRep0, new FrameTupleAccessor(rd1), rd1, hpcRep1,
+                                        new FrameTuplePairComparator(keys0, keys1, comparators), isLeftOuter,
+                                        nullWriters1, table, predEvaluator, null);
 
                                 if (buildWriter != null) {
                                     RunFileReader buildReader = buildWriter.createDeleteOnCloseReader();
@@ -559,8 +558,8 @@ public class HybridHashJoinOperatorDescriptor extends AbstractOperatorDescriptor
                 private void write(int i, ByteBuffer head) throws HyracksDataException {
                     RunFileWriter writer = probeWriters[i];
                     if (writer == null) {
-                        FileReference file = ctx
-                                .createManagedWorkspaceFile(PartitionAndJoinActivityNode.class.getSimpleName());
+                        FileReference file =
+                                ctx.createManagedWorkspaceFile(PartitionAndJoinActivityNode.class.getSimpleName());
                         writer = new RunFileWriter(file, ctx.getIoManager());
                         writer.open();
                         probeWriters[i] = writer;

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
index a51b780..10c6227 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
@@ -66,22 +66,20 @@ public class InMemoryHashJoin {
 
     private static final Logger LOGGER = LogManager.getLogger();
 
-    public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe,
-            ITuplePartitionComputer tpcProbe, FrameTupleAccessor accessorBuild, RecordDescriptor rDBuild,
-            ITuplePartitionComputer tpcBuild, FrameTuplePairComparator comparator, boolean isLeftOuter,
-            IMissingWriter[] missingWritersBuild, ISerializableTable table, IPredicateEvaluator predEval,
-            ISimpleFrameBufferManager bufferManager)
+    public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe, ITuplePartitionComputer tpcProbe,
+            FrameTupleAccessor accessorBuild, RecordDescriptor rDBuild, ITuplePartitionComputer tpcBuild,
+            FrameTuplePairComparator comparator, boolean isLeftOuter, IMissingWriter[] missingWritersBuild,
+            ISerializableTable table, IPredicateEvaluator predEval, ISimpleFrameBufferManager bufferManager)
             throws HyracksDataException {
         this(ctx, accessorProbe, tpcProbe, accessorBuild, rDBuild, tpcBuild, comparator, isLeftOuter,
                 missingWritersBuild, table, predEval, false, bufferManager);
     }
 
-    public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe,
-            ITuplePartitionComputer tpcProbe, FrameTupleAccessor accessorBuild,
-            RecordDescriptor rDBuild, ITuplePartitionComputer tpcBuild, FrameTuplePairComparator comparator,
-            boolean isLeftOuter, IMissingWriter[] missingWritersBuild, ISerializableTable table,
-            IPredicateEvaluator predEval, boolean reverse, ISimpleFrameBufferManager bufferManager)
-            throws HyracksDataException {
+    public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe, ITuplePartitionComputer tpcProbe,
+            FrameTupleAccessor accessorBuild, RecordDescriptor rDBuild, ITuplePartitionComputer tpcBuild,
+            FrameTuplePairComparator comparator, boolean isLeftOuter, IMissingWriter[] missingWritersBuild,
+            ISerializableTable table, IPredicateEvaluator predEval, boolean reverse,
+            ISimpleFrameBufferManager bufferManager) throws HyracksDataException {
         this.table = table;
         storedTuplePointer = new TuplePointer();
         buffers = new ArrayList<>();
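
The InMemoryHashJoin constructors above show a third recurring pattern: long parameter lists are repacked so each continuation line is filled up to the column limit before breaking, which leaves fewer wrapped lines for the same signature. A brief hypothetical sketch of that packing, with Object stand-ins in place of the real Hyracks types:

    // Illustration only: Object stand-ins replace the Hyracks interfaces used in the real constructor.
    public class ParameterPackingExample {
        // Continuation lines are filled to the column limit before breaking, so the same
        // parameter list needs fewer wrapped lines after the template is applied.
        public ParameterPackingExample(Object taskContext, Object probeAccessor, Object probePartitioner,
                Object buildAccessor, Object buildRecordDescriptor, Object buildPartitioner,
                Object tuplePairComparator, boolean isLeftOuter, Object missingWriters, Object hashTable,
                Object predicateEvaluator, Object bufferManager) {
            // No body needed; only the signature wrapping is of interest here.
        }
    }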

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
index d81d955..3873bae 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
@@ -162,15 +162,15 @@ public class InMemoryHashJoinOperatorDescriptor extends AbstractOperatorDescript
             for (int i = 0; i < comparatorFactories.length; ++i) {
                 comparators[i] = comparatorFactories[i].createBinaryComparator();
             }
-            final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nonMatchWriterFactories.length]
-                    : null;
+            final IMissingWriter[] nullWriters1 =
+                    isLeftOuter ? new IMissingWriter[nonMatchWriterFactories.length] : null;
             if (isLeftOuter) {
                 for (int i = 0; i < nonMatchWriterFactories.length; i++) {
                     nullWriters1[i] = nonMatchWriterFactories[i].createMissingWriter();
                 }
             }
-            final IPredicateEvaluator predEvaluator = (predEvaluatorFactory == null ? null
-                    : predEvaluatorFactory.createPredicateEvaluator());
+            final IPredicateEvaluator predEvaluator =
+                    (predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator());
 
             final int memSizeInBytes = memSizeInFrames * ctx.getInitialFrameSize();
             final IDeallocatableFramePool framePool = new DeallocatableFramePool(ctx, memSizeInBytes);
@@ -181,10 +181,10 @@ public class InMemoryHashJoinOperatorDescriptor extends AbstractOperatorDescript
 
                 @Override
                 public void open() throws HyracksDataException {
-                    ITuplePartitionComputer hpc0 = new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories)
-                            .createPartitioner();
-                    ITuplePartitionComputer hpc1 = new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories)
-                            .createPartitioner();
+                    ITuplePartitionComputer hpc0 =
+                            new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories).createPartitioner();
+                    ITuplePartitionComputer hpc1 =
+                            new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories).createPartitioner();
                     state = new HashBuildTaskState(ctx.getJobletContext().getJobId(),
                             new TaskId(getActivityId(), partition));
                     ISerializableTable table = new SerializableHashTable(tableSize, ctx, bufferManager);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java
index 5306ae5..9c5dc60 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java
@@ -51,8 +51,8 @@ class JoinComparator implements ITuplePairComparator {
         int fEnd1 = accessor1.getFieldEndOffset(tIndex1, field1);
         int fLen1 = fEnd1 - fStart1;
 
-        int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0, accessor1
-                .getBuffer().array(), fStart1 + fStartOffset1, fLen1);
+        int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0,
+                accessor1.getBuffer().array(), fStart1 + fStartOffset1, fLen1);
         if (c != 0) {
             return c;
         }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java
index d9c0bcd..d0f5a73 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java
@@ -70,9 +70,9 @@ public class NestedLoopJoin {
         if (memSize < 3) {
             throw new HyracksDataException("Not enough memory is available for Nested Loop Join");
         }
-        this.outerBufferMngr = new VariableFrameMemoryManager(
-                new VariableFramePool(ctx, ctx.getInitialFrameSize() * (memSize - 2)),
-                FrameFreeSlotPolicyFactory.createFreeSlotPolicy(EnumFreeSlotPolicy.LAST_FIT, memSize - 2));
+        this.outerBufferMngr =
+                new VariableFrameMemoryManager(new VariableFramePool(ctx, ctx.getInitialFrameSize() * (memSize - 2)),
+                        FrameFreeSlotPolicyFactory.createFreeSlotPolicy(EnumFreeSlotPolicy.LAST_FIT, memSize - 2));
 
         this.predEvaluator = predEval;
         this.isReversed = false;
@@ -90,8 +90,8 @@ public class NestedLoopJoin {
             missingTupleBuilder = null;
         }
 
-        FileReference file = ctx.getJobletContext()
-                .createManagedWorkspaceFile(this.getClass().getSimpleName() + this.toString());
+        FileReference file =
+                ctx.getJobletContext().createManagedWorkspaceFile(this.getClass().getSimpleName() + this.toString());
         runFileWriter = new RunFileWriter(file, ctx.getIoManager());
         runFileWriter.open();
     }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/cb9ca975/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
index 99dbfad..2236056 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
@@ -117,8 +117,8 @@ public class NestedLoopJoinOperatorDescriptor extends AbstractOperatorDescriptor
             final RecordDescriptor rd0 = recordDescProvider.getInputRecordDescriptor(nljAid, 0);
             final RecordDescriptor rd1 = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
             final ITuplePairComparator comparator = comparatorFactory.createTuplePairComparator(ctx);
-            final IPredicateEvaluator predEvaluator = (predEvaluatorFactory != null)
-                    ? predEvaluatorFactory.createPredicateEvaluator() : null;
+            final IPredicateEvaluator predEvaluator =
+                    (predEvaluatorFactory != null) ? predEvaluatorFactory.createPredicateEvaluator() : null;
 
             final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nullWriterFactories1.length] : null;
             if (isLeftOuter) {