You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@asterixdb.apache.org by mb...@apache.org on 2017/10/26 15:14:00 UTC

[2/3] asterixdb git commit: [NO ISSUE][HYR][*DB] Minor refactoring / address SonarQube comments

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
index 4843f81..f800be8 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
@@ -25,7 +25,6 @@ import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.commons.lang3.mutable.Mutable;
@@ -49,11 +48,11 @@ import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractUnne
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -662,26 +661,24 @@ public class FDsAndEquivClassesVisitor implements ILogicalOperatorVisitor<Void,
         // equivalent
         // class should still be propagated (kept).
         Set<LogicalVariable> usedVarSet = new HashSet<LogicalVariable>(usedVariables);
-        for (Entry<LogicalVariable, EquivalenceClass> entry : chldClasses.entrySet()) {
-            EquivalenceClass ec = entry.getValue();
+        chldClasses.forEach((key, ec) -> {
             for (ILogicalExpression expr : ec.getExpressionMembers()) {
-                Set<LogicalVariable> exprUsedVars = new HashSet<LogicalVariable>();
+                Set<LogicalVariable> exprUsedVars = new HashSet<>();
                 expr.getUsedVariables(exprUsedVars);
                 exprUsedVars.retainAll(usedVarSet);
                 // Check if the expression member uses a used variable.
                 if (!exprUsedVars.isEmpty()) {
-                    for (LogicalVariable v : ec.getMembers()) {
+                    // If variable members contain a used variable, the representative variable should be a used
+                    // variable.
+                    ec.getMembers().forEach(v -> {
                         eqClasses.put(v, ec);
-                        // If variable members contain a used variable, the
-                        // representative
-                        // variable should be a used variable.
                         if (usedVarSet.contains(v)) {
                             ec.setVariableRepresentative(v);
                         }
-                    }
+                    });
                 }
             }
-        }
+        });
 
         List<FunctionalDependency> chldFds = getOrComputeFDs(op2, ctx);
         for (FunctionalDependency fd : chldFds) {
@@ -786,8 +783,7 @@ public class FDsAndEquivClassesVisitor implements ILogicalOperatorVisitor<Void,
             LogicalVariable var = assignVars.get(assignVarIndex);
             ILogicalExpression expr = assignExprs.get(assignVarIndex).getValue();
             Map<LogicalVariable, EquivalenceClass> newVarEqcMap = new HashMap<LogicalVariable, EquivalenceClass>();
-            for (Entry<LogicalVariable, EquivalenceClass> entry : eqClasses.entrySet()) {
-                EquivalenceClass eqc = entry.getValue();
+            eqClasses.forEach((key, eqc) -> {
                 // If the equivalence class contains the right-hand-side
                 // expression,
                 // the left-hand-side variable is added into the equivalence
@@ -795,9 +791,9 @@ public class FDsAndEquivClassesVisitor implements ILogicalOperatorVisitor<Void,
                 if (eqc.contains(expr)) {
                     eqc.addMember(var);
                     newVarEqcMap.put(var, eqc); // Add var as a map key for the
-                                                // equivalence class.
+                    // equivalence class.
                 }
-            }
+            });
             eqClasses.putAll(newVarEqcMap);
         }
     }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
index b3b9da1..0c53685 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Objects;
 
 import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -37,11 +38,11 @@ import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogi
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -633,15 +634,13 @@ public class IsomorphismOperatorVisitor implements ILogicalOperatorVisitor<Boole
         variableMapping.clear();
         IsomorphismUtilities.mapVariablesTopDown(op, argOp, variableMapping);
 
-        List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
-        if (argOp.getInputs().size() > 0) {
-            for (int i = 0; i < argOp.getInputs().size(); i++) {
-                VariableUtilities.getLiveVariables(argOp.getInputs().get(i).getValue(), liveVars);
-            }
+        List<LogicalVariable> liveVars = new ArrayList<>();
+        for (int i = 0; i < argOp.getInputs().size(); i++) {
+            VariableUtilities.getLiveVariables(argOp.getInputs().get(i).getValue(), liveVars);
         }
-        List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
+        List<LogicalVariable> producedVars = new ArrayList<>();
         VariableUtilities.getProducedVariables(argOp, producedVars);
-        List<LogicalVariable> producedVarsNew = new ArrayList<LogicalVariable>();
+        List<LogicalVariable> producedVarsNew = new ArrayList<>();
         VariableUtilities.getProducedVariables(op, producedVarsNew);
 
         if (producedVars.size() != producedVarsNew.size()) {
@@ -671,14 +670,7 @@ public class IsomorphismOperatorVisitor implements ILogicalOperatorVisitor<Boole
     }
 
     private static boolean variableEqual(LogicalVariable var, LogicalVariable varArg) {
-        if (var == null && varArg == null) {
-            return true;
-        }
-        if (var.equals(varArg)) {
-            return true;
-        } else {
-            return false;
-        }
+        return Objects.equals(var, varArg);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
index 52d8e64..2caa252 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
@@ -22,7 +22,6 @@ import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.commons.lang3.mutable.Mutable;
@@ -34,17 +33,16 @@ import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
 import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
 import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractAssignOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -435,11 +433,9 @@ public class IsomorphismVariableMappingVisitor implements ILogicalOperatorVisito
     }
 
     private Mutable<ILogicalExpression> copyExpressionAndSubtituteVars(Mutable<ILogicalExpression> expr) {
-        ILogicalExpression copy = ((AbstractLogicalExpression) expr.getValue()).cloneExpression();
-        for (Entry<LogicalVariable, LogicalVariable> entry : variableMapping.entrySet()) {
-            copy.substituteVar(entry.getKey(), entry.getValue());
-        }
-        return new MutableObject<ILogicalExpression>(copy);
+        ILogicalExpression copy = expr.getValue().cloneExpression();
+        variableMapping.forEach(copy::substituteVar);
+        return new MutableObject<>(copy);
     }
 
     private void mapVariablesForUnion(ILogicalOperator op, ILogicalOperator arg) {

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalExpressionDeepCopyWithNewVariablesVisitor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalExpressionDeepCopyWithNewVariablesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalExpressionDeepCopyWithNewVariablesVisitor.java
index cef387a..24c5162 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalExpressionDeepCopyWithNewVariablesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalExpressionDeepCopyWithNewVariablesVisitor.java
@@ -65,10 +65,7 @@ public class LogicalExpressionDeepCopyWithNewVariablesVisitor
     private void deepCopyAnnotations(AbstractFunctionCallExpression src, AbstractFunctionCallExpression dest) {
         Map<Object, IExpressionAnnotation> srcAnnotations = src.getAnnotations();
         Map<Object, IExpressionAnnotation> destAnnotations = dest.getAnnotations();
-        for (Map.Entry<Object, IExpressionAnnotation> annotationEntry : srcAnnotations.entrySet()) {
-            IExpressionAnnotation annotation = annotationEntry.getValue().copy();
-            destAnnotations.put(annotationEntry.getKey(), annotation);
-        }
+        srcAnnotations.forEach((key, value) -> destAnnotations.put(key, value.copy()));
     }
 
     private void deepCopyOpaqueParameters(AbstractFunctionCallExpression src, AbstractFunctionCallExpression dest) {
@@ -85,7 +82,7 @@ public class LogicalExpressionDeepCopyWithNewVariablesVisitor
 
     public MutableObject<ILogicalExpression> deepCopyExpressionReference(Mutable<ILogicalExpression> exprRef)
             throws AlgebricksException {
-        return new MutableObject<ILogicalExpression>(deepCopy(exprRef.getValue()));
+        return new MutableObject<>(deepCopy(exprRef.getValue()));
     }
 
     // TODO return List<...>

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
index 934577f..a75e7d6 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
@@ -22,7 +22,6 @@ import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 
 import org.apache.commons.lang3.mutable.Mutable;
@@ -54,6 +53,7 @@ import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperato
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
@@ -65,7 +65,6 @@ import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOper
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
 import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
 import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
 import org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
 import org.apache.hyracks.algebricks.core.algebra.typing.ITypingContext;
@@ -179,7 +178,7 @@ public class LogicalOperatorDeepCopyWithNewVariablesVisitor
 
     private Mutable<ILogicalOperator> deepCopyOperatorReference(Mutable<ILogicalOperator> opRef, ILogicalOperator arg)
             throws AlgebricksException {
-        return new MutableObject<ILogicalOperator>(deepCopy(opRef.getValue(), arg));
+        return new MutableObject<>(deepCopy(opRef.getValue(), arg));
     }
 
     private List<Mutable<ILogicalOperator>> deepCopyOperatorReferenceList(List<Mutable<ILogicalOperator>> list,
@@ -280,18 +279,18 @@ public class LogicalOperatorDeepCopyWithNewVariablesVisitor
     }
 
     public void updatePrimaryKeys(IOptimizationContext context) {
-        for (Map.Entry<LogicalVariable, LogicalVariable> entry : inputVarToOutputVarMapping.entrySet()) {
-            List<LogicalVariable> primaryKey = context.findPrimaryKey(entry.getKey());
+        inputVarToOutputVarMapping.forEach((key, value) -> {
+            List<LogicalVariable> primaryKey = context.findPrimaryKey(key);
             if (primaryKey != null) {
-                List<LogicalVariable> head = new ArrayList<LogicalVariable>();
+                List<LogicalVariable> head = new ArrayList<>();
                 for (LogicalVariable variable : primaryKey) {
                     head.add(inputVarToOutputVarMapping.get(variable));
                 }
-                List<LogicalVariable> tail = new ArrayList<LogicalVariable>(1);
-                tail.add(entry.getValue());
+                List<LogicalVariable> tail = new ArrayList<>(1);
+                tail.add(value);
                 context.addPrimaryKey(new FunctionalDependency(head, tail));
             }
-        }
+        });
     }
 
     public LogicalVariable varCopy(LogicalVariable var) throws AlgebricksException {
@@ -398,7 +397,7 @@ public class LogicalOperatorDeepCopyWithNewVariablesVisitor
     public ILogicalOperator visitNestedTupleSourceOperator(NestedTupleSourceOperator op, ILogicalOperator arg)
             throws AlgebricksException {
         Mutable<ILogicalOperator> dataSourceReference = arg == null ? op.getDataSourceReference()
-                : new MutableObject<ILogicalOperator>(arg);
+                : new MutableObject<>(arg);
         NestedTupleSourceOperator opCopy = new NestedTupleSourceOperator(dataSourceReference);
         deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
         return opCopy;

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java
index 641557c..f59638c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java
@@ -70,11 +70,11 @@ public final class UnorderedPartitionedProperty extends AbstractGroupingProperty
 
     @Override
     public void substituteColumnVars(Map<LogicalVariable, LogicalVariable> varMap) {
-        for (Map.Entry<LogicalVariable, LogicalVariable> var : varMap.entrySet()) {
-            if (columnSet.remove(var.getKey())) {
-                columnSet.add(var.getValue());
+        varMap.forEach((key, value) -> {
+            if (columnSet.remove(key)) {
+                columnSet.add(value);
             }
-        }
+        });
     }
 
 }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
index 2960903..f817cd6 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
@@ -22,7 +22,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 
 import org.apache.commons.lang3.mutable.Mutable;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -98,13 +97,10 @@ public class PlanCompiler {
     }
 
     private void reviseEdges(IHyracksJobBuilder builder) {
-        /**
+        /*
          * revise the edges for the case of replicate operator
          */
-        for (Entry<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> entry : operatorVisitedToParents
-                .entrySet()) {
-            Mutable<ILogicalOperator> child = entry.getKey();
-            List<Mutable<ILogicalOperator>> parents = entry.getValue();
+        operatorVisitedToParents.forEach((child, parents) -> {
             if (parents.size() > 1) {
                 if (child.getValue().getOperatorTag() == LogicalOperatorTag.REPLICATE
                         || child.getValue().getOperatorTag() == LogicalOperatorTag.SPLIT) {
@@ -113,7 +109,8 @@ public class PlanCompiler {
                         // make the order of the graph edges consistent with the order of rop's outputs
                         List<Mutable<ILogicalOperator>> outputs = rop.getOutputs();
                         for (Mutable<ILogicalOperator> parent : parents) {
-                            builder.contributeGraphEdge(child.getValue(), outputs.indexOf(parent), parent.getValue(), 0);
+                            builder.contributeGraphEdge(child.getValue(), outputs.indexOf(parent), parent.getValue(),
+                                    0);
                         }
                     } else {
                         int i = 0;
@@ -124,6 +121,6 @@ public class PlanCompiler {
                     }
                 }
             }
-        }
+        });
     }
 }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
index 1bbda27..9c452bf 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
@@ -22,7 +22,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 
 public class DotFormatBuilder {
@@ -129,9 +128,7 @@ public class DotFormatBuilder {
         public String toString() {
             StringBuilder nodeString = new StringBuilder();
             nodeString.append(nodeId).append(" [");
-            for (Map.Entry attribute : attributes.entrySet()) {
-                nodeString.append(attribute.getKey()).append("=").append(attribute.getValue()).append(",");
-            }
+            attributes.forEach((key, value) -> nodeString.append(key).append("=").append(value).append(","));
             // remove last ","
             if (nodeString.charAt(nodeString.length() - 1) == ',') {
                 nodeString.deleteCharAt(nodeString.length() - 1);
@@ -186,9 +183,7 @@ public class DotFormatBuilder {
         public String toString() {
             StringBuilder edgeString = new StringBuilder();
             edgeString.append(source.getNodeId()).append("->").append(destination.getNodeId()).append(" [");
-            for (Map.Entry attribute : attributes.entrySet()) {
-                edgeString.append(attribute.getKey()).append("=").append(attribute.getValue()).append(",");
-            }
+            attributes.forEach((key, value) -> edgeString.append(key).append("=").append(value).append(","));
             // remove last ","
             if (edgeString.charAt(edgeString.length() - 1) == ',') {
                 edgeString.deleteCharAt(edgeString.length() - 1);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
index 1c2b9b5..084626e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
@@ -103,12 +103,8 @@ public class JobActivityGraphBuilder implements IActivityGraphBuilder {
     public void finish() {
         Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> caMap = jag
                 .getConnectorActivityMap();
-        for (Map.Entry<ConnectorDescriptorId, Pair<IActivity, Integer>> e : connectorProducerMap.entrySet()) {
-            ConnectorDescriptorId cdId = e.getKey();
-            Pair<IActivity, Integer> producer = e.getValue();
-            Pair<IActivity, Integer> consumer = connectorConsumerMap.get(cdId);
-            caMap.put(cdId, Pair.of(producer, consumer));
-        }
+        connectorProducerMap
+                .forEach((cdId, producer) -> caMap.put(cdId, Pair.of(producer, connectorConsumerMap.get(cdId))));
     }
 
     private <K, V> void addToValueSet(Map<K, Set<V>> map, K n1, V n2) {

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
index 2cf96c2..9f66080 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
@@ -309,11 +309,7 @@ public class JobSpecification implements Serializable, IOperatorDescriptorRegist
     }
 
     private <K, V> void insertIntoIndexedMap(Map<K, List<V>> map, K key, int index, V value) {
-        List<V> vList = map.get(key);
-        if (vList == null) {
-            vList = new ArrayList<>();
-            map.put(key, vList);
-        }
+        List<V> vList = map.computeIfAbsent(key, k -> new ArrayList<>());
         extend(vList, index);
         vList.set(index, value);
     }
@@ -322,9 +318,9 @@ public class JobSpecification implements Serializable, IOperatorDescriptorRegist
     public String toString() {
         StringBuilder buffer = new StringBuilder();
 
-        for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
-            buffer.append(e.getKey().getId()).append(" : ").append(e.getValue().toString()).append("\n");
-            List<IConnectorDescriptor> inputs = opInputMap.get(e.getKey());
+        opMap.forEach((key, value) -> {
+            buffer.append(key.getId()).append(" : ").append(value.toString()).append("\n");
+            List<IConnectorDescriptor> inputs = opInputMap.get(key);
             if (inputs != null && !inputs.isEmpty()) {
                 buffer.append("   Inputs:\n");
                 for (IConnectorDescriptor c : inputs) {
@@ -332,7 +328,7 @@ public class JobSpecification implements Serializable, IOperatorDescriptorRegist
                             .append("\n");
                 }
             }
-            List<IConnectorDescriptor> outputs = opOutputMap.get(e.getKey());
+            List<IConnectorDescriptor> outputs = opOutputMap.get(key);
             if (outputs != null && !outputs.isEmpty()) {
                 buffer.append("   Outputs:\n");
                 for (IConnectorDescriptor c : outputs) {
@@ -340,7 +336,7 @@ public class JobSpecification implements Serializable, IOperatorDescriptorRegist
                             .append("\n");
                 }
             }
-        }
+        });
 
         buffer.append("\n").append("Constraints:\n").append(userConstraints);
 
@@ -352,8 +348,8 @@ public class JobSpecification implements Serializable, IOperatorDescriptorRegist
         ObjectNode jjob = om.createObjectNode();
 
         ArrayNode jopArray = om.createArrayNode();
-        for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
-            ObjectNode op = e.getValue().toJSON();
+        opMap.forEach((key, value) -> {
+            ObjectNode op = value.toJSON();
             if (!userConstraints.isEmpty()) {
                 // Add operator partition constraints to each JSON operator.
                 ObjectNode pcObject = om.createObjectNode();
@@ -364,12 +360,12 @@ public class JobSpecification implements Serializable, IOperatorDescriptorRegist
                     ExpressionTag tag = constraint.getLValue().getTag();
                     if (tag == ExpressionTag.PARTITION_COUNT) {
                         PartitionCountExpression pce = (PartitionCountExpression) constraint.getLValue();
-                        if (e.getKey() == pce.getOperatorDescriptorId()) {
+                        if (key == pce.getOperatorDescriptorId()) {
                             pcObject.put("count", getConstraintExpressionRValue(constraint));
                         }
                     } else if (tag == ExpressionTag.PARTITION_LOCATION) {
                         PartitionLocationExpression ple = (PartitionLocationExpression) constraint.getLValue();
-                        if (e.getKey() == ple.getOperatorDescriptorId()) {
+                        if (key == ple.getOperatorDescriptorId()) {
                             pleObject.put(Integer.toString(ple.getPartition()),
                                     getConstraintExpressionRValue(constraint));
                         }
@@ -383,23 +379,23 @@ public class JobSpecification implements Serializable, IOperatorDescriptorRegist
                 }
             }
             jopArray.add(op);
-        }
+        });
         jjob.set("operators", jopArray);
 
         ArrayNode jcArray = om.createArrayNode();
-        for (Map.Entry<ConnectorDescriptorId, IConnectorDescriptor> e : connMap.entrySet()) {
+        connMap.forEach((key, value) -> {
             ObjectNode conn = om.createObjectNode();
             Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connection =
-                    connectorOpMap.get(e.getKey());
+                    connectorOpMap.get(key);
             if (connection != null) {
                 conn.put("in-operator-id", connection.getLeft().getLeft().getOperatorId().toString());
                 conn.put("in-operator-port", connection.getLeft().getRight().intValue());
                 conn.put("out-operator-id", connection.getRight().getLeft().getOperatorId().toString());
                 conn.put("out-operator-port", connection.getRight().getRight().intValue());
             }
-            conn.set("connector", e.getValue().toJSON());
+            conn.set("connector", value.toJSON());
             jcArray.add(conn);
-        }
+        });
         jjob.set("connectors", jcArray);
 
         return jjob;

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
index 20f128d..7cdb300 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
@@ -29,7 +29,6 @@ import java.util.Queue;
 import java.util.Set;
 
 import org.apache.commons.lang3.tuple.Pair;
-
 import org.apache.hyracks.api.dataflow.ActivityId;
 import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
 import org.apache.hyracks.api.dataflow.IActivity;
@@ -37,7 +36,6 @@ import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 import org.apache.hyracks.api.job.ActivityCluster;
 import org.apache.hyracks.api.job.ActivityClusterGraph;
-import org.apache.hyracks.api.job.ActivityClusterId;
 import org.apache.hyracks.api.rewriter.runtime.SuperActivity;
 
 /**
@@ -65,12 +63,10 @@ public class ActivityClusterGraphRewriter {
         acg.getActivityMap().clear();
         acg.getConnectorMap().clear();
         Map<IActivity, SuperActivity> invertedActivitySuperActivityMap = new HashMap<>();
-        for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
-            rewriteIntraActivityCluster(entry.getValue(), invertedActivitySuperActivityMap);
-        }
-        for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
-            rewriteInterActivityCluster(entry.getValue(), invertedActivitySuperActivityMap);
-        }
+        acg.getActivityClusterMap()
+                .forEach((key, value) -> rewriteIntraActivityCluster(value, invertedActivitySuperActivityMap));
+        acg.getActivityClusterMap()
+                .forEach((key, value) -> rewriteInterActivityCluster(value, invertedActivitySuperActivityMap));
         invertedActivitySuperActivityMap.clear();
     }
 
@@ -84,14 +80,11 @@ public class ActivityClusterGraphRewriter {
             Map<IActivity, SuperActivity> invertedActivitySuperActivityMap) {
         Map<ActivityId, Set<ActivityId>> blocked2BlockerMap = ac.getBlocked2BlockerMap();
         Map<ActivityId, ActivityId> invertedAid2SuperAidMap = new HashMap<>();
-        for (Entry<IActivity, SuperActivity> entry : invertedActivitySuperActivityMap.entrySet()) {
-            invertedAid2SuperAidMap.put(entry.getKey().getActivityId(), entry.getValue().getActivityId());
-        }
+        invertedActivitySuperActivityMap
+                .forEach((key, value) -> invertedAid2SuperAidMap.put(key.getActivityId(), value.getActivityId()));
         Map<ActivityId, Set<ActivityId>> replacedBlocked2BlockerMap = new HashMap<>();
-        for (Entry<ActivityId, Set<ActivityId>> entry : blocked2BlockerMap.entrySet()) {
-            ActivityId blocked = entry.getKey();
+        blocked2BlockerMap.forEach((blocked, blockers) -> {
             ActivityId replacedBlocked = invertedAid2SuperAidMap.get(blocked);
-            Set<ActivityId> blockers = entry.getValue();
             Set<ActivityId> replacedBlockers = null;
             if (blockers != null) {
                 replacedBlockers = new HashSet<>();
@@ -113,7 +106,7 @@ public class ActivityClusterGraphRewriter {
                     replacedBlocked2BlockerMap.put(replacedBlocked, existingBlockers);
                 }
             }
-        }
+        });
         blocked2BlockerMap.clear();
         blocked2BlockerMap.putAll(replacedBlocked2BlockerMap);
     }
@@ -136,23 +129,21 @@ public class ActivityClusterGraphRewriter {
         Map<ActivityId, SuperActivity> superActivities = new HashMap<>();
         Map<ActivityId, Queue<IActivity>> toBeExpendedMap = new HashMap<>();
 
-        /**
+        /*
          * Build the initial super activities
          */
-        for (Entry<ActivityId, IActivity> entry : activities.entrySet()) {
-            ActivityId activityId = entry.getKey();
-            IActivity activity = entry.getValue();
+        activities.forEach((activityId, activity) -> {
             if (activityInputMap.get(activityId) == null) {
                 startActivities.put(activityId, activity);
-                /**
+                /*
                  * use the start activity's id as the id of the super activity
                  */
                 createNewSuperActivity(ac, superActivities, toBeExpendedMap, invertedActivitySuperActivityMap,
                         activityId, activity);
             }
-        }
+        });
 
-        /**
+        /*
          * expand one-to-one connected activity cluster by the BFS order.
          * after the while-loop, the original activities are partitioned
          * into equivalent classes, one-per-super-activity.
@@ -165,19 +156,19 @@ public class ActivityClusterGraphRewriter {
                 ActivityId superActivityId = entry.getKey();
                 SuperActivity superActivity = entry.getValue();
 
-                /**
+                /*
                  * for the case where the super activity has already been swallowed
                  */
                 if (superActivities.get(superActivityId) == null) {
                     continue;
                 }
 
-                /**
+                /*
                  * expend the super activity
                  */
                 Queue<IActivity> toBeExpended = toBeExpendedMap.get(superActivityId);
                 if (toBeExpended == null) {
-                    /**
+                    /*
                      * Nothing to expand
                      */
                     continue;
@@ -191,7 +182,7 @@ public class ActivityClusterGraphRewriter {
                         IActivity newActivity = endPoints.getRight().getLeft();
                         SuperActivity existingSuperActivity = invertedActivitySuperActivityMap.get(newActivity);
                         if (outputConn.getClass().getName().contains(ONE_TO_ONE_CONNECTOR)) {
-                            /**
+                            /*
                              * expend the super activity cluster on an one-to-one out-bound connection
                              */
                             if (existingSuperActivity == null) {
@@ -199,13 +190,13 @@ public class ActivityClusterGraphRewriter {
                                 toBeExpended.add(newActivity);
                                 invertedActivitySuperActivityMap.put(newActivity, superActivity);
                             } else {
-                                /**
+                                /*
                                  * the two activities already in the same super activity
                                  */
                                 if (existingSuperActivity == superActivity) {
                                     continue;
                                 }
-                                /**
+                                /*
                                  * swallow an existing super activity
                                  */
                                 swallowExistingSuperActivity(superActivities, toBeExpendedMap,
@@ -214,7 +205,7 @@ public class ActivityClusterGraphRewriter {
                             }
                         } else {
                             if (existingSuperActivity == null) {
-                                /**
+                                /*
                                  * create new activity
                                  */
                                 createNewSuperActivity(ac, superActivities, toBeExpendedMap,
@@ -224,10 +215,10 @@ public class ActivityClusterGraphRewriter {
                     }
                 }
 
-                /**
+                /*
                  * remove the to-be-expended queue if it is empty
                  */
-                if (toBeExpended.size() == 0) {
+                if (toBeExpended.isEmpty()) {
                     toBeExpendedMap.remove(superActivityId);
                 }
             }
@@ -237,28 +228,25 @@ public class ActivityClusterGraphRewriter {
         Map<ConnectorDescriptorId, RecordDescriptor> connRecordDesc = ac.getConnectorRecordDescriptorMap();
         Map<SuperActivity, Integer> superActivityProducerPort = new HashMap<>();
         Map<SuperActivity, Integer> superActivityConsumerPort = new HashMap<>();
-        for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
-            superActivityProducerPort.put(entry.getValue(), 0);
-            superActivityConsumerPort.put(entry.getValue(), 0);
-        }
+        superActivities.forEach((key, value) -> {
+            superActivityProducerPort.put(value, 0);
+            superActivityConsumerPort.put(value, 0);
+        });
 
-        /**
+        /*
          * create a new activity cluster to replace the old activity cluster
          */
         ActivityCluster newActivityCluster = new ActivityCluster(acg, ac.getId());
         newActivityCluster.setConnectorPolicyAssignmentPolicy(ac.getConnectorPolicyAssignmentPolicy());
-        for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
-            newActivityCluster.addActivity(entry.getValue());
-            acg.getActivityMap().put(entry.getKey(), newActivityCluster);
-        }
+        superActivities.forEach((key, value) -> {
+            newActivityCluster.addActivity(value);
+            acg.getActivityMap().put(key, newActivityCluster);
+        });
 
-        /**
+        /*
          * Setup connectors: either inside a super activity or among super activities
          */
-        for (Entry<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> entry : connectorActivityMap
-                .entrySet()) {
-            ConnectorDescriptorId connectorId = entry.getKey();
-            Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> endPoints = entry.getValue();
+        connectorActivityMap.forEach((connectorId, endPoints) -> {
             IActivity producerActivity = endPoints.getLeft().getLeft();
             IActivity consumerActivity = endPoints.getRight().getLeft();
             int producerPort = endPoints.getLeft().getRight();
@@ -266,14 +254,14 @@ public class ActivityClusterGraphRewriter {
             RecordDescriptor recordDescriptor = connRecordDesc.get(connectorId);
             IConnectorDescriptor conn = connMap.get(connectorId);
             if (conn.getClass().getName().contains(ONE_TO_ONE_CONNECTOR)) {
-                /**
+                /*
                  * connection edge between inner activities
                  */
                 SuperActivity residingSuperActivity = invertedActivitySuperActivityMap.get(producerActivity);
                 residingSuperActivity.connect(conn, producerActivity, producerPort, consumerActivity, consumerPort,
                         recordDescriptor);
             } else {
-                /**
+                /*
                  * connection edge between super activities
                  */
                 SuperActivity producerSuperActivity = invertedActivitySuperActivityMap.get(producerActivity);
@@ -284,7 +272,7 @@ public class ActivityClusterGraphRewriter {
                 newActivityCluster.connect(conn, producerSuperActivity, producerSAPort, consumerSuperActivity,
                         consumerSAPort, recordDescriptor);
 
-                /**
+                /*
                  * bridge the port
                  */
                 producerSuperActivity.setClusterOutputIndex(producerSAPort, producerActivity.getActivityId(),
@@ -293,30 +281,30 @@ public class ActivityClusterGraphRewriter {
                         consumerPort);
                 acg.getConnectorMap().put(connectorId, newActivityCluster);
 
-                /**
+                /*
                  * increasing the port number for the producer and consumer
                  */
                 superActivityProducerPort.put(producerSuperActivity, ++producerSAPort);
                 superActivityConsumerPort.put(consumerSuperActivity, ++consumerSAPort);
             }
-        }
+        });
 
-        /**
+        /*
          * Set up the roots of the new activity cluster
          */
-        for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
-            List<IConnectorDescriptor> connIds = newActivityCluster.getActivityOutputMap().get(entry.getKey());
+        superActivities.forEach((key, value) -> {
+            List<IConnectorDescriptor> connIds = newActivityCluster.getActivityOutputMap().get(key);
             if (connIds == null || connIds.isEmpty()) {
-                newActivityCluster.addRoot(entry.getValue());
+                newActivityCluster.addRoot(value);
             }
-        }
+        });
 
-        /**
+        /*
          * set up the blocked2Blocker mapping, which will be updated in the rewriteInterActivityCluster call
          */
         newActivityCluster.getBlocked2BlockerMap().putAll(ac.getBlocked2BlockerMap());
 
-        /**
+        /*
          * replace the old activity cluster with the new activity cluster
          */
         acg.getActivityClusterMap().put(ac.getId(), newActivityCluster);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java
index 476a744..ddcfd78 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java
@@ -59,17 +59,17 @@ public class SuperActivity extends OneToOneConnectedActivityCluster implements I
             throws HyracksDataException {
         final Map<ActivityId, IActivity> startActivities = new HashMap<>();
         Map<ActivityId, IActivity> activities = getActivityMap();
-        for (Entry<ActivityId, IActivity> entry : activities.entrySet()) {
-            /**
+        activities.forEach((key, value) -> {
+            /*
              * extract start activities
              */
-            List<IConnectorDescriptor> conns = getActivityInputMap().get(entry.getKey());
+            List<IConnectorDescriptor> conns = getActivityInputMap().get(key);
             if (conns == null || conns.isEmpty()) {
-                startActivities.put(entry.getKey(), entry.getValue());
+                startActivities.put(key, value);
             }
-        }
+        });
 
-        /**
+        /*
          * wrap a RecordDescriptorProvider for the super activity
          */
         IRecordDescriptorProvider wrappedRecDescProvider = new IRecordDescriptorProvider() {
@@ -77,7 +77,7 @@ public class SuperActivity extends OneToOneConnectedActivityCluster implements I
             @Override
             public RecordDescriptor getInputRecordDescriptor(ActivityId aid, int inputIndex) {
                 if (startActivities.get(aid) != null) {
-                    /**
+                    /*
                      * if the activity is a start (input boundary) activity
                      */
                     int superActivityInputChannel = SuperActivity.this.getClusterInputIndex(Pair.of(aid, inputIndex));
@@ -86,14 +86,14 @@ public class SuperActivity extends OneToOneConnectedActivityCluster implements I
                     }
                 }
                 if (SuperActivity.this.getActivityMap().get(aid) != null) {
-                    /**
+                    /*
                      * if the activity is an internal activity of the super activity
                      */
                     IConnectorDescriptor conn = getActivityInputMap().get(aid).get(inputIndex);
                     return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                 }
 
-                /**
+                /*
                  * the following is for the case where the activity is in other SuperActivities
                  */
                 ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();
@@ -121,7 +121,7 @@ public class SuperActivity extends OneToOneConnectedActivityCluster implements I
 
             @Override
             public RecordDescriptor getOutputRecordDescriptor(ActivityId aid, int outputIndex) {
-                /**
+                /*
                  * if the activity is an output-boundary activity
                  */
                 int superActivityOutputChannel = SuperActivity.this.getClusterOutputIndex(Pair.of(aid, outputIndex));
@@ -130,14 +130,14 @@ public class SuperActivity extends OneToOneConnectedActivityCluster implements I
                 }
 
                 if (SuperActivity.this.getActivityMap().get(aid) != null) {
-                    /**
+                    /*
                      * if the activity is an internal activity of the super activity
                      */
                     IConnectorDescriptor conn = getActivityOutputMap().get(aid).get(outputIndex);
                     return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
                 }
 
-                /**
+                /*
                  * the following is for the case where the activity is in other SuperActivities
                  */
                 ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
index 285e932..e9491f3 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
@@ -56,9 +56,9 @@ public class ErrorMessageUtil {
         Properties prop = new Properties();
         Map<Integer, String> errorMessageMap = new HashMap<>();
         prop.load(resourceStream);
-        for (Map.Entry<Object, Object> entry : prop.entrySet()) {
-            String key = (String) entry.getKey();
-            String msg = (String) entry.getValue();
+        prop.forEach((key1, value) -> {
+            String key = (String) key1;
+            String msg = (String) value;
             if (key.contains(COMMA)) {
                 String[] codes = key.split(COMMA);
                 for (String code : codes) {
@@ -67,7 +67,7 @@ public class ErrorMessageUtil {
             } else {
                 errorMessageMap.put(Integer.parseInt(key), msg);
             }
-        }
+        });
         return errorMessageMap;
     }
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
index 66f7f10..e5eec11 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
@@ -22,7 +22,6 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
-import java.util.Map;
 
 public class ExecutionTimeProfiler {
 
@@ -67,9 +66,7 @@ public class ExecutionTimeProfiler {
         try {
             synchronized (lock1) {
                 sb.append("\n\n");
-                for (Map.Entry<String, String> entry : spentTimePerJobMap.get(jobSignature).entrySet()) {
-                    sb.append(entry.getValue());
-                }
+                spentTimePerJobMap.get(jobSignature).forEach((key, value) -> sb.append(value));
                 fos.write(sb.toString().getBytes());
                 fos.flush();
                 spentTimePerJobMap.get(jobSignature).clear();

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
index a3fbb70..7b99df2 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
@@ -260,9 +260,9 @@ public class ClusterControllerService implements IControllerService {
     }
 
     private void connectNCs() {
-        getNCServices().entrySet().forEach(ncService -> {
+        getNCServices().forEach((key, value) -> {
             final TriggerNCWork triggerWork = new TriggerNCWork(ClusterControllerService.this,
-                    ncService.getValue().getLeft(), ncService.getValue().getRight(), ncService.getKey());
+                    value.getLeft(), value.getRight(), key);
             executor.submit(triggerWork);
         });
         serviceCtx.addClusterLifecycleListener(new IClusterLifecycleListener() {
@@ -289,10 +289,9 @@ public class ClusterControllerService implements IControllerService {
 
     private void terminateNCServices() throws Exception {
         List<ShutdownNCServiceWork> shutdownNCServiceWorks = new ArrayList<>();
-        getNCServices().entrySet().forEach(ncService -> {
-            if (ncService.getValue().getRight() != NCConfig.NCSERVICE_PORT_DISABLED) {
-                ShutdownNCServiceWork shutdownWork = new ShutdownNCServiceWork(ncService.getValue().getLeft(),
-                        ncService.getValue().getRight(), ncService.getKey());
+        getNCServices().forEach((key, value) -> {
+            if (value.getRight() != NCConfig.NCSERVICE_PORT_DISABLED) {
+                ShutdownNCServiceWork shutdownWork = new ShutdownNCServiceWork(value.getLeft(), value.getRight(), key);
                 workQueue.schedule(shutdownWork);
                 shutdownNCServiceWorks.add(shutdownWork);
             }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
index a380967..3cd6235 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
@@ -135,11 +135,9 @@ public class NodeManager implements INodeManager {
     @Override
     public Map<String, NodeControllerInfo> getNodeControllerInfoMap() {
         Map<String, NodeControllerInfo> result = new LinkedHashMap<>();
-        for (Map.Entry<String, NodeControllerState> e : nodeRegistry.entrySet()) {
-            NodeControllerState ncState = e.getValue();
-            result.put(e.getKey(), new NodeControllerInfo(e.getKey(), NodeStatus.ALIVE, ncState.getDataPort(),
-                    ncState.getDatasetPort(), ncState.getMessagingPort(), ncState.getCapacity().getCores()));
-        }
+        nodeRegistry.forEach(
+                (key, ncState) -> result.put(key, new NodeControllerInfo(key, NodeStatus.ALIVE, ncState.getDataPort(),
+                        ncState.getDatasetPort(), ncState.getMessagingPort(), ncState.getCapacity().getCores())));
         return result;
     }
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
index 57b8c50..04166a4 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
@@ -242,15 +242,15 @@ class ActivityClusterPlanner {
 
         JobRun jobRun = executor.getJobRun();
         Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies = jobRun.getConnectorPolicyMap();
-        for (Map.Entry<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> e : taskConnectivity.entrySet()) {
-            Set<TaskId> cluster = taskClusterMap.get(e.getKey());
-            for (Pair<TaskId, ConnectorDescriptorId> p : e.getValue()) {
+        taskConnectivity.forEach((key, value) -> {
+            Set<TaskId> cluster = taskClusterMap.get(key);
+            for (Pair<TaskId, ConnectorDescriptorId> p : value) {
                 IConnectorPolicy cPolicy = connectorPolicies.get(p.getRight());
                 if (cPolicy.requiresProducerConsumerCoscheduling()) {
                     cluster.add(p.getLeft());
                 }
             }
-        }
+        });
 
         /*
          * We compute the transitive closure of this (producer-consumer) relation to find the largest set of

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
index a3078b6..8a69a6f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
@@ -65,8 +65,8 @@ import org.apache.hyracks.control.cc.partitions.PartitionMatchMaker;
 import org.apache.hyracks.control.cc.work.JobCleanupWork;
 import org.apache.hyracks.control.common.job.PartitionState;
 import org.apache.hyracks.control.common.job.TaskAttemptDescriptor;
-import org.apache.hyracks.control.common.work.NoOpCallback;
 import org.apache.hyracks.control.common.work.IResultCallback;
+import org.apache.hyracks.control.common.work.NoOpCallback;
 
 public class JobExecutor {
     private static final Logger LOGGER = Logger.getLogger(JobExecutor.class.getName());
@@ -379,14 +379,13 @@ public class JobExecutor {
         tcAttempt.initializePendingTaskCounter();
         tcAttempts.add(tcAttempt);
 
-        /**
+        /*
          * Improvement for reducing master/slave message communications, for each TaskAttemptDescriptor,
          * we set the NetworkAddress[][] partitionLocations, in which each row is for an incoming connector descriptor
          * and each column is for an input channel of the connector.
          */
         INodeManager nodeManager = ccs.getNodeManager();
-        for (Map.Entry<String, List<TaskAttemptDescriptor>> e : taskAttemptMap.entrySet()) {
-            List<TaskAttemptDescriptor> tads = e.getValue();
+        taskAttemptMap.forEach((key, tads) -> {
             for (TaskAttemptDescriptor tad : tads) {
                 TaskAttemptId taid = tad.getTaskAttemptId();
                 int attempt = taid.getAttempt();
@@ -401,7 +400,7 @@ public class JobExecutor {
                 for (int i = 0; i < inPartitionCounts.length; ++i) {
                     ConnectorDescriptorId cdId = inConnectors.get(i).getConnectorId();
                     IConnectorPolicy policy = jobRun.getConnectorPolicyMap().get(cdId);
-                    /**
+                    /*
                      * carry sender location information into a task
                      * when it is not the case that it is an re-attempt and the send-side
                      * is materialized blocking.
@@ -419,7 +418,7 @@ public class JobExecutor {
                 }
                 tad.setInputPartitionLocations(partitionLocations);
             }
-        }
+        });
 
         tcAttempt.setStatus(TaskClusterAttempt.TaskClusterStatus.RUNNING);
         tcAttempt.setStartTime(System.currentTimeMillis());
@@ -560,12 +559,11 @@ public class JobExecutor {
         final JobId jobId = jobRun.getJobId();
         LOGGER.info("Abort map for job: " + jobId + ": " + abortTaskAttemptMap);
         INodeManager nodeManager = ccs.getNodeManager();
-        for (Map.Entry<String, List<TaskAttemptId>> entry : abortTaskAttemptMap.entrySet()) {
-            final NodeControllerState node = nodeManager.getNodeControllerState(entry.getKey());
-            final List<TaskAttemptId> abortTaskAttempts = entry.getValue();
+        abortTaskAttemptMap.forEach((key, abortTaskAttempts) -> {
+            final NodeControllerState node = nodeManager.getNodeControllerState(key);
             if (node != null) {
                 if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Aborting: " + abortTaskAttempts + " at " + entry.getKey());
+                    LOGGER.info("Aborting: " + abortTaskAttempts + " at " + key);
                 }
                 try {
                     node.getNodeController().abortTasks(jobId, abortTaskAttempts);
@@ -573,7 +571,7 @@ public class JobExecutor {
                     LOGGER.log(Level.SEVERE, e.getMessage(), e);
                 }
             }
-        }
+        });
         inProgressTaskClusters.remove(tcAttempt.getTaskCluster());
         TaskCluster tc = tcAttempt.getTaskCluster();
         PartitionMatchMaker pmm = jobRun.getPartitionMatchMaker();

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
index 95a6d9b..ef0bca2 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
@@ -284,10 +284,9 @@ public class JobRun implements IJobStatusConditionVariable {
                 ObjectNode planJSON = om.createObjectNode();
 
                 ArrayNode acTasks = om.createArrayNode();
-                for (Map.Entry<ActivityId, ActivityPlan> e : acp.getActivityPlanMap().entrySet()) {
-                    ActivityPlan acPlan = e.getValue();
+                acp.getActivityPlanMap().forEach((key, acPlan) -> {
                     ObjectNode entry = om.createObjectNode();
-                    entry.put("activity-id", e.getKey().toString());
+                    entry.put("activity-id", key.toString());
 
                     ActivityPartitionDetails apd = acPlan.getActivityPartitionDetails();
                     entry.put("partition-count", apd.getPartitionCount());
@@ -319,18 +318,18 @@
                         ArrayNode dependentTasksJSON = om.createArrayNode();
                         for (TaskId dependent : t.getDependents()) {
                             dependentTasksJSON.add(dependent.toString());
                         }
                         task.set("dependents", dependentTasksJSON);
 
                         ArrayNode dependencyTasksJSON = om.createArrayNode();
                         for (TaskId dependency : t.getDependencies()) {
                             dependencyTasksJSON.add(dependency.toString());
                         }
                         task.set("dependencies", dependencyTasksJSON);
 
                         tasks.add(task);
                     }
                     entry.set("tasks", tasks);
 
                     acTasks.add(entry);
-                }
+                });
                 planJSON.set("activities", acTasks);
 
                 ArrayNode tClusters = om.createArrayNode();

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
index ee22768..3c26f84 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
@@ -18,16 +18,14 @@
  */
 package org.apache.hyracks.control.cc.web;
 
-import java.util.Map;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-
 import org.apache.hyracks.control.cc.ClusterControllerService;
 import org.apache.hyracks.control.cc.web.util.IJSONOutputFunction;
 import org.apache.hyracks.control.cc.work.GatherStateDumpsWork;
 import org.apache.hyracks.control.cc.work.GatherStateDumpsWork.StateDumpRun;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
 public class StateDumpRESTAPIFunction implements IJSONOutputFunction {
     private final ClusterControllerService ccs;
 
@@ -44,9 +42,7 @@ public class StateDumpRESTAPIFunction implements IJSONOutputFunction {
 
         ObjectMapper om = new ObjectMapper();
         ObjectNode result = om.createObjectNode();
-        for (Map.Entry<String, String> e : sdr.getStateDump().entrySet()) {
-            result.put(e.getKey(), e.getValue());
-        }
+        sdr.getStateDump().forEach(result::put);
         return result;
     }
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
index d13b5e6..67738ba 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
@@ -344,20 +344,20 @@ public class ConfigManager implements IConfigManager, Serializable {
 
     private void applyDefaults() {
         LOGGER.fine("applying defaults");
-        for (Map.Entry<Section, Map<String, IOption>> entry : sectionMap.entrySet()) {
-            if (entry.getKey() == Section.NC) {
-                entry.getValue().values().forEach(option -> getNodeNames()
+        sectionMap.forEach((key, value) -> {
+            if (key == Section.NC) {
+                value.values().forEach(option -> getNodeNames()
                         .forEach(node -> getOrDefault(getNodeEffectiveMap(node), option, node)));
                 for (Map.Entry<String, Map<IOption, Object>> nodeMap : nodeSpecificMap.entrySet()) {
-                    entry.getValue().values()
+                    value.values()
                             .forEach(option -> getOrDefault(
                                     new CompositeMap<>(nodeMap.getValue(), definedMap, new NoOpMapMutator()), option,
                                     nodeMap.getKey()));
                 }
             } else {
-                entry.getValue().values().forEach(option -> getOrDefault(configurationMap, option, null));
+                value.values().forEach(option -> getOrDefault(configurationMap, option, null));
             }
-        }
+        });
     }
 
     private Object getOrDefault(Map<IOption, Object> map, IOption option, String nodeId) {
@@ -450,15 +450,13 @@ public class ConfigManager implements IConfigManager, Serializable {
 
     public Ini toIni(boolean includeDefaults) {
         Ini ini = new Ini();
-        for (Map.Entry<IOption, Object> entry : (includeDefaults ? configurationMap : definedMap).entrySet()) {
-            if (entry.getValue() != null) {
-                final IOption option = entry.getKey();
-                ini.add(option.section().sectionName(), option.ini(), option.type().serializeToIni(entry.getValue()));
+        (includeDefaults ? configurationMap : definedMap).forEach((option, value) -> {
+            if (value != null) {
+                ini.add(option.section().sectionName(), option.ini(), option.type().serializeToIni(value));
             }
-        }
-        for (Map.Entry<String, Map<IOption, Object>> nodeMapEntry : nodeSpecificMap.entrySet()) {
-            String section = Section.NC.sectionName() + "/" + nodeMapEntry.getKey();
-            final Map<IOption, Object> nodeValueMap = nodeMapEntry.getValue();
+        });
+        nodeSpecificMap.forEach((key, nodeValueMap) -> {
+            String section = Section.NC.sectionName() + "/" + key;
             synchronized (nodeValueMap) {
                 for (Map.Entry<IOption, Object> entry : nodeValueMap.entrySet()) {
                     if (entry.getValue() != null) {
@@ -467,7 +465,7 @@ public class ConfigManager implements IConfigManager, Serializable {
                     }
                 }
             }
-        }
+        });
         return ini;
     }
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
index d1d33a5..90dfc8c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
@@ -26,19 +26,19 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
 
+import org.apache.hyracks.api.io.IWritable;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 
-import org.apache.hyracks.api.io.IWritable;
-
 public abstract class AbstractProfile implements IWritable, Serializable {
     private static final long serialVersionUID = 1L;
 
     protected Map<String, Long> counters;
 
     public AbstractProfile() {
-        counters = new HashMap<String, Long>();
+        counters = new HashMap<>();
     }
 
     public Map<String, Long> getCounters() {
@@ -50,12 +50,12 @@ public abstract class AbstractProfile implements IWritable, Serializable {
     protected void populateCounters(ObjectNode jo) {
         ObjectMapper om = new ObjectMapper();
         ArrayNode countersObj = om.createArrayNode();
-        for (Map.Entry<String, Long> e : counters.entrySet()) {
+        counters.forEach((key, value) -> {
             ObjectNode jpe = om.createObjectNode();
-            jpe.put("name", e.getKey());
-            jpe.put("value", e.getValue());
+            jpe.put("name", key);
+            jpe.put("value", value);
             countersObj.add(jpe);
-        }
+        });
         jo.set("counters", countersObj);
     }
 
@@ -75,7 +75,7 @@ public abstract class AbstractProfile implements IWritable, Serializable {
     @Override
     public void readFields(DataInput input) throws IOException {
         int size = input.readInt();
-        counters = new HashMap<String, Long>();
+        counters = new HashMap<>();
         for (int i = 0; i < size; i++) {
             String key = input.readUTF();
             long value = input.readLong();

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
index 79a5538..64d074b 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
@@ -25,10 +25,11 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
 
+import org.apache.hyracks.api.job.JobId;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.hyracks.api.job.JobId;
 
 public class JobProfile extends AbstractProfile {
     private static final long serialVersionUID = 1L;
@@ -49,7 +50,7 @@ public class JobProfile extends AbstractProfile {
 
     public JobProfile(JobId jobId) {
         this.jobId = jobId;
-        jobletProfiles = new HashMap<String, JobletProfile>();
+        jobletProfiles = new HashMap<>();
     }
 
     public JobId getJobId() {
@@ -91,7 +92,7 @@ public class JobProfile extends AbstractProfile {
     public void readFields(DataInput input) throws IOException {
         jobId = JobId.create(input);
         int size = input.readInt();
-        jobletProfiles = new HashMap<String, JobletProfile>();
+        jobletProfiles = new HashMap<>();
         for (int i = 0; i < size; i++) {
             String key = input.readUTF();
             JobletProfile value = JobletProfile.create(input);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
index c3792df..5bdb1b5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
@@ -25,11 +25,11 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
 
+import org.apache.hyracks.api.dataflow.TaskAttemptId;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.hyracks.api.dataflow.TaskAttemptId;
 
 public class JobletProfile extends AbstractProfile {
     private static final long serialVersionUID = 1L;
@@ -50,7 +50,7 @@ public class JobletProfile extends AbstractProfile {
 
     public JobletProfile(String nodeId) {
         this.nodeId = nodeId;
-        taskProfiles = new HashMap<TaskAttemptId, TaskProfile>();
+        taskProfiles = new HashMap<>();
     }
 
     public String getNodeId() {
@@ -67,7 +67,7 @@ public class JobletProfile extends AbstractProfile {
         ObjectMapper om = new ObjectMapper();
         ObjectNode json = om.createObjectNode();
 
-        json.put("node-id", nodeId.toString());
+        json.put("node-id", nodeId);
         populateCounters(json);
         ArrayNode tasks = om.createArrayNode();
         for (TaskProfile p : taskProfiles.values()) {
@@ -94,7 +94,7 @@ public class JobletProfile extends AbstractProfile {
         super.readFields(input);
         nodeId = input.readUTF();
         int size = input.readInt();
-        taskProfiles = new HashMap<TaskAttemptId, TaskProfile>();
+        taskProfiles = new HashMap<>();
         for (int i = 0; i < size; i++) {
             TaskAttemptId key = TaskAttemptId.create(input);
             TaskProfile value = TaskProfile.create(input);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
index 6dc9619..66a5e0f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
@@ -186,9 +186,7 @@ public class Joblet implements IHyracksJobletContext, ICounterContext {
 
     public void dumpProfile(JobletProfile jProfile) {
         Map<String, Long> counters = jProfile.getCounters();
-        for (Map.Entry<String, Counter> e : counterMap.entrySet()) {
-            counters.put(e.getKey(), e.getValue().get());
-        }
+        counterMap.forEach((key, value) -> counters.put(key, value.get()));
         for (Task task : taskMap.values()) {
             TaskProfile taskProfile = new TaskProfile(task.getTaskAttemptId(),
                     new Hashtable<>(task.getPartitionSendProfile()), new StatsCollector());

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
index c58a2fa..11148a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
@@ -35,9 +35,9 @@ import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import org.apache.hyracks.util.IntSerDeUtils;
 import org.apache.hyracks.dataflow.std.sort.Utility;
 import org.apache.hyracks.dataflow.std.structures.TuplePointer;
+import org.apache.hyracks.util.IntSerDeUtils;
 
 public abstract class AbstractTupleMemoryManagerTest {
     ISerializerDeserializer[] fieldsSerDer = new ISerializerDeserializer[] {
@@ -51,13 +51,13 @@ public abstract class AbstractTupleMemoryManagerTest {
 
     protected void assertEachTupleInFTAIsInBuffer(Map<Integer, Integer> map, Map<TuplePointer, Integer> mapInserted) {
         ITuplePointerAccessor accessor = getTuplePointerAccessor();
-        for (Map.Entry<TuplePointer, Integer> entry : mapInserted.entrySet()) {
-            accessor.reset(entry.getKey());
-            int dataLength = map.get(entry.getValue());
-            assertEquals((int) entry.getValue(),
+        mapInserted.forEach((key, value) -> {
+            accessor.reset(key);
+            int dataLength = map.get(value);
+            assertEquals((int) value,
                     IntSerDeUtils.getInt(accessor.getBuffer().array(), accessor.getAbsFieldStartOffset(0)));
             assertEquals(dataLength, accessor.getTupleLength());
-        }
+        });
         assertEquals(map.size(), mapInserted.size());
     }
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/8ecbff11/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
index 521dff1..b0c210f 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
@@ -130,15 +130,15 @@ public abstract class AbstractExternalGroupbyTest {
 
         @Override
         public void open() throws HyracksDataException {
-            for (Map.Entry<Integer, String> keyValue : keyValueMap.entrySet()) {
-                Result result = answer.get(keyValue.getValue());
+            keyValueMap.forEach((key, value) -> {
+                Result result = answer.get(value);
                 if (result == null) {
-                    answer.put(keyValue.getValue(), new Result(keyValue.getKey()));
+                    answer.put(value, new Result(key));
                 } else {
-                    result.sum += keyValue.getKey();
+                    result.sum += key;
                     result.count++;
                 }
-            }
+            });
         }
 
         @Override