[NO ISSUE][HYR][*DB] Minor refactoring / address SonarQube comments
Change-Id: Icf10b6df0fdc006675d8f0da6fd06d50200c6b6a
Reviewed-on: https://asterix-gerrit.ics.uci.edu/2098
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Contrib: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Murtadha Hubail <mhubail@apache.org>
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoNestedSubplanRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoNestedSubplanRule.java
index 1b1bc93..2b98762 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoNestedSubplanRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoNestedSubplanRule.java
@@ -111,9 +111,7 @@
}
}
// Removes subplans.
- for (Map.Entry<AbstractOperatorWithNestedPlans, List<ILogicalPlan>> entry : nspToSubplanListMap.entrySet()) {
- entry.getKey().getNestedPlans().removeAll(entry.getValue());
- }
+ nspToSubplanListMap.forEach((key, value) -> key.getNestedPlans().removeAll(value));
}
private boolean collectVarsBottomUp(Mutable<ILogicalOperator> opRef, IOptimizationContext context,
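Annotation: most hunks in this change apply the same SonarQube-recommended transformation, replacing an explicit `Map.entrySet()` loop with `Map.forEach`, which hands the key and value straight to a lambda. A minimal, self-contained sketch of the before/after (the map and names here are illustrative, not taken from the patch):

```java
import java.util.LinkedHashMap;
import java.util.Map;

public class ForEachSketch {
    public static void main(String[] args) {
        Map<String, Integer> counts = new LinkedHashMap<>();
        counts.put("a", 1);
        counts.put("b", 2);

        // Before: iterate over entrySet() and unpack each entry by hand.
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }

        // After: Map.forEach passes key and value to the lambda directly.
        counts.forEach((key, value) -> System.out.println(key + " -> " + value));
    }
}
```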
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
index 19c6da7..8e09164 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
@@ -653,11 +653,11 @@
// Replace the variables in the join condition based on the mapping of variables
// in the new probe subtree.
Map<LogicalVariable, LogicalVariable> varMapping = firstDeepCopyVisitor.getInputToOutputVariableMapping();
- for (Map.Entry<LogicalVariable, LogicalVariable> varMapEntry : varMapping.entrySet()) {
- if (varMapEntry.getKey() != varMapEntry.getValue()) {
- joinCond.substituteVar(varMapEntry.getKey(), varMapEntry.getValue());
+ varMapping.forEach((key, value) -> {
+ if (key != value) {
+ joinCond.substituteVar(key, value);
}
- }
+ });
return originalProbeSubTreeRootRef;
}
@@ -726,9 +726,7 @@
// condition since we deep-copied one of the scanner subtrees which
// changed variables.
AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) joinRef.getValue();
- for (Map.Entry<LogicalVariable, LogicalVariable> entry : copyVarMap.entrySet()) {
- joinOp.getCondition().getValue().substituteVar(entry.getKey(), entry.getValue());
- }
+ copyVarMap.forEach((key, value) -> joinOp.getCondition().getValue().substituteVar(key, value));
joinOp.getInputs().clear();
joinOp.getInputs().add(new MutableObject<ILogicalOperator>(scanSubTree));
// Make sure that the build input (which may be materialized causing blocking) comes from
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java
index 961dcc1..9a49472 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java
@@ -53,10 +53,10 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator;
@@ -86,43 +86,43 @@
import org.apache.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
import org.apache.hyracks.algebricks.core.algebra.visitors.IQueryOperatorVisitor;
-/*
- This visitor inlines all nested tuple source operators in the query
- plan rooted at the operator being visited, with a deep copy of the query
- plan rooted at the input <code>subplanInputOperator</code>.
-
- The visitor ensures that the variables used to correlate between the
- query plan rooted at <code>subplanInputOperator</code> are propagated
- to the operator being visited.
-
- ----------------------------------
- Here is an abstract example.
- The original query plan:
- --Op1
- --Subplan{
- --AggregateOp
- --NestedOp
- .....
- --Nested-Tuple-Source
- }
- --InputOp
- .....
-
- After we call NestedOp.accept(....) with this visitor. We will get an
- intermediate plan that looks like:
- --Op1
- --Subplan{
- --AggregateOp
- --NestedOp
- .....
- --InputOp'
- ....
- }
- --InputOp
- .....
- The plan rooted at InputOp' is a deep copy of the plan rooted at InputOp
- with a different set of variables.
-
+/**
+ * This visitor inlines all nested tuple source operators in the query
+ * plan rooted at the operator being visited, with a deep copy of the query
+ * plan rooted at the input <code>subplanInputOperator</code>.
+ *
+ * The visitor ensures that the variables used to correlate between the
+ * query plan rooted at <code>subplanInputOperator</code> are propagated
+ * to the operator being visited.
+ *
+ * ----------------------------------
+ * Here is an abstract example.
+ * The original query plan:
+ * --Op1
+ * --Subplan{
+ * --AggregateOp
+ * --NestedOp
+ * .....
+ * --Nested-Tuple-Source
+ * }
+ * --InputOp
+ * .....
+ *
+ * After we call NestedOp.accept(....) with this visitor, we will get an
+ * intermediate plan that looks like:
+ * --Op1
+ * --Subplan{
+ * --AggregateOp
+ * --NestedOp
+ * .....
+ * --InputOp'
+ * ....
+ * }
+ * --InputOp
+ * .....
+ * The plan rooted at InputOp' is a deep copy of the plan rooted at InputOp
+ * with a different set of variables.
+ *
*/
class InlineAllNtsInSubplanVisitor implements IQueryOperatorVisitor<ILogicalOperator, Void> {
// The optimization context.
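Annotation: the class-level comment above is promoted from a plain block comment to Javadoc (`/** ... */`) so documentation tools pick it up; conversely, later hunks in this patch (RecoveryManager, IndexingScheduler, ReplicationManager) demote `/** ... */` comments inside method bodies to `/* ... */`, since Javadoc is only meaningful on declarations. A small sketch of the convention:

```java
/**
 * Javadoc belongs on declarations: classes, fields, constructors, methods.
 */
public class CommentConventions {
    public int increment(int value) {
        /*
         * Inside a method body, use a plain block comment; a Javadoc comment
         * here is never picked up by the javadoc tool and trips style checks.
         */
        return value + 1;
    }
}
```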
@@ -218,8 +218,8 @@
for (LogicalVariable keyVar : correlatedKeyVars) {
if (!groupKeyVars.contains(keyVar)) {
LogicalVariable newVar = context.newVar();
- op.getGroupByList().add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(newVar,
- new MutableObject<ILogicalExpression>(new VariableReferenceExpression(keyVar))));
+ op.getGroupByList()
+ .add(new Pair<>(newVar, new MutableObject<>(new VariableReferenceExpression(keyVar))));
addedGroupKeyMapping.put(keyVar, newVar);
}
}
@@ -237,9 +237,7 @@
}
// Updates the var mapping for added group-by keys.
- for (Map.Entry<LogicalVariable, LogicalVariable> entry : addedGroupKeyMapping.entrySet()) {
- updateInputToOutputVarMapping(entry.getKey(), entry.getValue(), false);
- }
+ addedGroupKeyMapping.forEach((key, value) -> updateInputToOutputVarMapping(key, value, false));
return op;
}
@@ -260,23 +258,23 @@
ILogicalOperator assignOp = assignOpAndRecordVar.first;
LogicalVariable recordVar = assignOpAndRecordVar.second;
ILogicalOperator inputOp = op.getInputs().get(0).getValue();
- assignOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
+ assignOp.getInputs().add(new MutableObject<>(inputOp));
// Rewrites limit to a group-by with limit as its nested operator.
Pair<ILogicalOperator, LogicalVariable> gbyOpAndAggVar = wrapLimitInGroupBy(op, recordVar, inputLiveVars);
ILogicalOperator gbyOp = gbyOpAndAggVar.first;
LogicalVariable aggVar = gbyOpAndAggVar.second;
- gbyOp.getInputs().add(new MutableObject<ILogicalOperator>(assignOp));
+ gbyOp.getInputs().add(new MutableObject<>(assignOp));
// Adds an unnest operators on top of the group-by operator.
Pair<ILogicalOperator, LogicalVariable> unnestOpAndUnnestVar = createUnnestForAggregatedList(aggVar);
ILogicalOperator unnestOp = unnestOpAndUnnestVar.first;
LogicalVariable unnestVar = unnestOpAndUnnestVar.second;
- unnestOp.getInputs().add(new MutableObject<ILogicalOperator>(gbyOp));
+ unnestOp.getInputs().add(new MutableObject<>(gbyOp));
// Adds field accesses to recover input live variables.
ILogicalOperator fieldAccessAssignOp = createFieldAccessAssignOperator(unnestVar, inputLiveVars);
- fieldAccessAssignOp.getInputs().add(new MutableObject<ILogicalOperator>(unnestOp));
+ fieldAccessAssignOp.getInputs().add(new MutableObject<>(unnestOp));
OperatorManipulationUtil.computeTypeEnvironmentBottomUp(fieldAccessAssignOp, context);
return fieldAccessAssignOp;
@@ -288,10 +286,10 @@
List<Mutable<ILogicalExpression>> recordConstructorArgs = new ArrayList<>();
for (LogicalVariable inputLiveVar : inputLiveVars) {
if (!correlatedKeyVars.contains(inputLiveVar)) {
- recordConstructorArgs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
+ recordConstructorArgs.add(new MutableObject<>(new ConstantExpression(
new AsterixConstantValue(new AString(Integer.toString(inputLiveVar.getId()))))));
recordConstructorArgs
- .add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(inputLiveVar)));
+ .add(new MutableObject<>(new VariableReferenceExpression(inputLiveVar)));
}
}
LogicalVariable recordVar = context.newVar();
@@ -300,7 +298,7 @@
FunctionUtil.getFunctionInfo(BuiltinFunctions.OPEN_RECORD_CONSTRUCTOR),
recordConstructorArgs));
AssignOperator assignOp = new AssignOperator(recordVar, recordExprRef);
- return new Pair<ILogicalOperator, LogicalVariable>(assignOp, recordVar);
+ return new Pair<>(assignOp, recordVar);
}
private Pair<ILogicalOperator, LogicalVariable> wrapLimitInGroupBy(ILogicalOperator op, LogicalVariable recordVar,
@@ -345,7 +343,7 @@
// Adds a nested tuple source operator as the input operator to the
// limit operator.
- NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(gbyOp));
+ NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<>(gbyOp));
currentOp.getInputs().add(new MutableObject<>(nts));
// Sets the root of the added nested plan to the aggregate operator.
@@ -365,15 +363,15 @@
private Pair<ILogicalOperator, LogicalVariable> createUnnestForAggregatedList(LogicalVariable aggVar) {
LogicalVariable unnestVar = context.newVar();
// Creates an unnest function expression.
- Mutable<ILogicalExpression> unnestArg = new MutableObject<ILogicalExpression>(
+ Mutable<ILogicalExpression> unnestArg = new MutableObject<>(
new VariableReferenceExpression(aggVar));
- List<Mutable<ILogicalExpression>> unnestArgList = new ArrayList<Mutable<ILogicalExpression>>();
+ List<Mutable<ILogicalExpression>> unnestArgList = new ArrayList<>();
unnestArgList.add(unnestArg);
- Mutable<ILogicalExpression> unnestExpr = new MutableObject<ILogicalExpression>(
+ Mutable<ILogicalExpression> unnestExpr = new MutableObject<>(
new UnnestingFunctionCallExpression(
FunctionUtil.getFunctionInfo(BuiltinFunctions.SCAN_COLLECTION), unnestArgList));
ILogicalOperator unnestOp = new UnnestOperator(unnestVar, unnestExpr);
- return new Pair<ILogicalOperator, LogicalVariable>(unnestOp, unnestVar);
+ return new Pair<>(unnestOp, unnestVar);
}
private ILogicalOperator createFieldAccessAssignOperator(LogicalVariable recordVar,
@@ -388,17 +386,16 @@
fieldAccessVars.add(newVar);
// fieldAcess expr
List<Mutable<ILogicalExpression>> argRefs = new ArrayList<>();
- argRefs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(recordVar)));
- argRefs.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
+ argRefs.add(new MutableObject<>(new VariableReferenceExpression(recordVar)));
+ argRefs.add(new MutableObject<>(new ConstantExpression(
new AsterixConstantValue(new AString(Integer.toString(inputLiveVar.getId()))))));
- fieldAccessExprs.add(new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
+ fieldAccessExprs.add(new MutableObject<>(new ScalarFunctionCallExpression(
FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_NAME), argRefs)));
// Updates variable mapping for ancestor operators.
updateInputToOutputVarMapping(inputLiveVar, newVar, false);
}
}
- AssignOperator fieldAccessAssignOp = new AssignOperator(fieldAccessVars, fieldAccessExprs);
- return fieldAccessAssignOp;
+ return new AssignOperator(fieldAccessVars, fieldAccessExprs);
}
@Override
@@ -429,15 +426,13 @@
correlatedKeyVars.clear();
correlatedKeyVars.addAll(primaryOpAndVars.second);
// Update key variables and input-output-var mapping.
- for (Map.Entry<LogicalVariable, LogicalVariable> entry : varMap.entrySet()) {
- LogicalVariable oldVar = entry.getKey();
- LogicalVariable newVar = entry.getValue();
+ varMap.forEach((oldVar, newVar) -> {
if (correlatedKeyVars.contains(oldVar)) {
correlatedKeyVars.remove(oldVar);
correlatedKeyVars.add(newVar);
}
updateInputToOutputVarMapping(oldVar, newVar, true);
- }
+ });
return primaryOpAndVars.first;
}
@@ -454,8 +449,8 @@
List<Pair<IOrder, Mutable<ILogicalExpression>>> orderExprList = new ArrayList<>();
// Adds keyVars to the prefix of sorting columns.
for (LogicalVariable keyVar : correlatedKeyVars) {
- orderExprList.add(new Pair<IOrder, Mutable<ILogicalExpression>>(OrderOperator.ASC_ORDER,
- new MutableObject<ILogicalExpression>(new VariableReferenceExpression(keyVar))));
+ orderExprList.add(
+ new Pair<>(OrderOperator.ASC_ORDER, new MutableObject<>(new VariableReferenceExpression(keyVar))));
}
orderExprList.addAll(op.getOrderExpressions());
@@ -636,7 +631,7 @@
return op;
}
GroupByOperator gbyOp = new GroupByOperator();
- // Creates a copy of correlatedKeyVars, to fix the ConcurrentModificationExcetpion in ASTERIXDB-1581.
+ // Creates a copy of correlatedKeyVars, to fix the ConcurrentModificationException in ASTERIXDB-1581.
List<LogicalVariable> copyOfCorrelatedKeyVars = new ArrayList<>(correlatedKeyVars);
for (LogicalVariable keyVar : copyOfCorrelatedKeyVars) {
// This limits the visitor can only be applied to a nested logical
@@ -652,7 +647,7 @@
ILogicalOperator inputOp = op.getInputs().get(0).getValue();
gbyOp.getInputs().add(new MutableObject<>(inputOp));
- NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(gbyOp));
+ NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<>(gbyOp));
op.getInputs().clear();
op.getInputs().add(new MutableObject<>(nts));
@@ -718,7 +713,7 @@
subplanInputVarToCurrentVarMap.put(oldVar, newVar);
currentVarToSubplanInputVarMap.put(newVar, oldVar);
} else {
- varMapIntroducedByRewriting.add(new Pair<LogicalVariable, LogicalVariable>(oldVar, newVar));
+ varMapIntroducedByRewriting.add(new Pair<>(oldVar, newVar));
}
}
@@ -726,8 +721,8 @@
List<Pair<IOrder, Mutable<ILogicalExpression>>> orderExprs) {
List<Pair<IOrder, Mutable<ILogicalExpression>>> clonedOrderExprs = new ArrayList<>();
for (Pair<IOrder, Mutable<ILogicalExpression>> orderExpr : orderExprs) {
- clonedOrderExprs.add(new Pair<IOrder, Mutable<ILogicalExpression>>(orderExpr.first,
- new MutableObject<ILogicalExpression>(orderExpr.second.getValue().cloneExpression())));
+ clonedOrderExprs.add(
+ new Pair<>(orderExpr.first, new MutableObject<>(orderExpr.second.getValue().cloneExpression())));
}
return clonedOrderExprs;
}
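Annotation: the other recurring fix in this file is the diamond operator, shortening `new MutableObject<ILogicalExpression>(...)` and `new Pair<IOrder, Mutable<ILogicalExpression>>(...)` to `new MutableObject<>(...)` and `new Pair<>(...)`; since Java 7 the compiler infers the type arguments from the target type. A sketch with an illustrative generic holder standing in for `MutableObject`/`Pair`:

```java
import java.util.ArrayList;
import java.util.List;

public class DiamondSketch {
    // A tiny stand-in for classes like MutableObject<T> or Pair<A, B>.
    static class Holder<T> {
        final T value;
        Holder(T value) { this.value = value; }
    }

    public static void main(String[] args) {
        // Before Java 7: the type argument had to be spelled out.
        Holder<List<String>> verbose = new Holder<List<String>>(new ArrayList<String>());

        // Diamond operator: the compiler infers it from the target type.
        Holder<List<String>> concise = new Holder<>(new ArrayList<>());

        System.out.println(verbose.value.size() + " " + concise.value.size());
    }
}
```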
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineSubplanInputForNestedTupleSourceRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineSubplanInputForNestedTupleSourceRule.java
index 0bda955..8ab8b1d 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineSubplanInputForNestedTupleSourceRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineSubplanInputForNestedTupleSourceRule.java
@@ -48,9 +48,9 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
import org.apache.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
@@ -324,9 +324,7 @@
childrenRef,
context);
changed = changed || resultFromChild.first;
- for (Map.Entry<LogicalVariable, LogicalVariable> entry : resultFromChild.second.entrySet()) {
- LogicalVariable oldVar = entry.getKey();
- LogicalVariable newVar = entry.getValue();
+ resultFromChild.second.forEach((oldVar, newVar) -> {
if (liveVars.contains(oldVar)) {
// Maps live variables for its ancestors.
replacedVarMapForAncestor.put(oldVar, newVar);
@@ -337,7 +335,7 @@
oldVar = newVar;
}
}
- }
+ });
replacedVarMap.putAll(resultFromChild.second);
}
VariableUtilities.substituteVariables(op, replacedVarMap, context);
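Annotation: one subtlety in the hunk above is that the lambda body goes on to reassign its own parameter (the unchanged `oldVar = newVar;` line). That is legal: lambda parameters behave like ordinary method parameters and may be reassigned, whereas captured local variables must be effectively final. A sketch of the distinction, with illustrative names:

```java
import java.util.Map;
import java.util.TreeMap;

public class LambdaCaptureSketch {
    public static void main(String[] args) {
        Map<String, String> varMap = new TreeMap<>();
        varMap.put("$1", "$2");

        String prefix = "mapped: "; // captured local: must stay effectively final

        varMap.forEach((oldVar, newVar) -> {
            // Legal: a lambda parameter is a normal local of the lambda body.
            oldVar = oldVar.toUpperCase();
            System.out.println(prefix + oldVar + " -> " + newVar);
            // Illegal (would not compile): prefix = "changed";
        });
    }
}
```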
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
index c3f01e8..db6f985 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
@@ -154,13 +154,14 @@
DatasetDecl datasetStmt = (DatasetDecl) stmt;
Map<String, String> hints = datasetStmt.getHints();
if (hints != null && !hints.isEmpty()) {
- Pair<Boolean, String> validationResult = null;
- StringBuffer errorMsgBuffer = new StringBuffer();
+ StringBuilder errorMsgBuffer = new StringBuilder();
for (Entry<String, String> hint : hints.entrySet()) {
- validationResult = DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
+ Pair<Boolean, String> validationResult = DatasetHints.validate(appCtx, hint.getKey(),
+ hint.getValue());
if (!validationResult.first) {
- errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue()
- + " error in processing hint: " + hint.getKey() + " " + validationResult.second);
+ errorMsgBuffer.append("Dataset: ").append(datasetStmt.getName().getValue())
+ .append(" error in processing hint: ").append(hint.getKey()).append(" ")
+ .append(validationResult.second);
errorMsgBuffer.append(" \n");
}
}
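Annotation: this hunk bundles two fixes. The unsynchronized `StringBuilder` replaces the legacy `StringBuffer`, and string concatenation inside `append(...)` is split into chained appends, so no throwaway intermediate `String` is built on each call. A sketch with illustrative values:

```java
public class AppendSketch {
    public static void main(String[] args) {
        String dataset = "ds";
        String hint = "CARDINALITY";

        // Concatenation inside append() still allocates an intermediate String.
        StringBuilder bad = new StringBuilder();
        bad.append("Dataset: " + dataset + " error in processing hint: " + hint);

        // Chained appends write straight into the builder's buffer.
        StringBuilder good = new StringBuilder();
        good.append("Dataset: ").append(dataset)
            .append(" error in processing hint: ").append(hint);

        System.out.println(bad + "\n" + good);
    }
}
```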
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index 66a1073..d5f41df 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -25,7 +25,6 @@
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
-import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
@@ -39,7 +38,6 @@
import org.apache.asterix.common.exceptions.MetadataException;
import org.apache.asterix.common.functions.FunctionConstants;
import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.external.util.ExternalDataUtils;
import org.apache.asterix.lang.aql.util.RangeMapBuilder;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.Expression.Kind;
@@ -82,7 +80,6 @@
import org.apache.asterix.metadata.declared.ResultSetDataSink;
import org.apache.asterix.metadata.declared.ResultSetSinkId;
import org.apache.asterix.metadata.entities.Dataset;
-import org.apache.asterix.metadata.entities.Feed;
import org.apache.asterix.metadata.entities.Function;
import org.apache.asterix.metadata.entities.InternalDatasetDetails;
import org.apache.asterix.metadata.functions.ExternalFunctionCompilerUtil;
@@ -96,7 +93,6 @@
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
-import org.apache.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledUpsertStatement;
import org.apache.asterix.translator.CompiledStatements.ICompiledDmlStatement;
import org.apache.asterix.translator.util.PlanTranslationUtil;
@@ -1487,12 +1483,12 @@
// Updates mapping like <$a, $b> in varMap to <$a, $c>, where there is a mapping <$b, $c>
// in childVarMap.
- for (Map.Entry<LogicalVariable, LogicalVariable> entry : varMap.entrySet()) {
+ varMap.entrySet().forEach(entry -> {
LogicalVariable newVar = childVarMap.get(entry.getValue());
if (newVar != null) {
entry.setValue(newVar);
}
- }
+ });
varMap.putAll(childVarMap);
++childIndex;
}
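Annotation: note that this rewrite deliberately keeps `entrySet().forEach` rather than switching to `Map.forEach`: the body calls `entry.setValue(newVar)`, and updating a mapping in place requires the `Map.Entry` handle. A minimal sketch:

```java
import java.util.HashMap;
import java.util.Map;

public class SetValueSketch {
    public static void main(String[] args) {
        Map<String, Integer> scores = new HashMap<>();
        scores.put("a", 1);
        scores.put("b", 2);

        // setValue writes through to the underlying map, so iterating the
        // entry set (not Map.forEach) is the right tool for in-place updates.
        scores.entrySet().forEach(entry -> {
            if (entry.getValue() % 2 == 1) {
                entry.setValue(entry.getValue() * 10);
            }
        });

        System.out.println(scores); // e.g. {a=10, b=2}
    }
}
```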
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
index 4b78b93..567d587 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
@@ -427,9 +427,7 @@
int perNodeParallelismMax = parallelism / numNodes + 1;
List<String> allNodes = new ArrayList<>();
Set<String> selectedNodesWithOneMorePartition = new HashSet<>();
- for (Map.Entry<String, NodeControllerInfo> entry : ncMap.entrySet()) {
- allNodes.add(entry.getKey());
- }
+ ncMap.forEach((key, value) -> allNodes.add(key));
Random random = new Random();
for (int index = numNodesWithOneMorePartition; index >= 1; --index) {
int pick = random.nextInt(index);
@@ -439,9 +437,8 @@
// Generates cluster locations, which has duplicates for a node if it contains more than one partitions.
List<String> locations = new ArrayList<>();
- for (Map.Entry<String, NodeControllerInfo> entry : ncMap.entrySet()) {
- String nodeId = entry.getKey();
- int availableCores = entry.getValue().getNumAvailableCores();
+ ncMap.forEach((nodeId, value) -> {
+ int availableCores = value.getNumAvailableCores();
int nodeParallelism =
selectedNodesWithOneMorePartition.contains(nodeId) ? perNodeParallelismMax : perNodeParallelismMin;
int coresToUse =
@@ -449,17 +446,13 @@
for (int count = 0; count < coresToUse; ++count) {
locations.add(nodeId);
}
- }
+ });
return new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[0]));
}
// Gets the total number of available cores in the cluster.
private static int getTotalNumCores(Map<String, NodeControllerInfo> ncMap) {
- int sum = 0;
- for (Map.Entry<String, NodeControllerInfo> entry : ncMap.entrySet()) {
- sum += entry.getValue().getNumAvailableCores();
- }
- return sum;
+ return ncMap.values().stream().mapToInt(NodeControllerInfo::getNumAvailableCores).sum();
}
// Gets the frame limit.
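Annotation: the summing loop in `getTotalNumCores` collapses to a stream with a method reference; `mapToInt` yields an `IntStream`, so `sum()` runs without boxing. (As an aside, the hunk above it, `ncMap.forEach((key, value) -> allNodes.add(key))`, only reads keys, so `allNodes.addAll(ncMap.keySet())` would be shorter still; the same applies to value-only reads such as the MetadataCache hunk later in this patch.) A sketch with a stand-in type, since `NodeControllerInfo` lives in Hyracks:

```java
import java.util.Map;
import java.util.TreeMap;

public class SumSketch {
    // Illustrative stand-in for NodeControllerInfo#getNumAvailableCores().
    static class NodeInfo {
        final int numAvailableCores;
        NodeInfo(int numAvailableCores) { this.numAvailableCores = numAvailableCores; }
        int getNumAvailableCores() { return numAvailableCores; }
    }

    static int totalCores(Map<String, NodeInfo> ncMap) {
        // Before: int sum = 0; for (entry : ncMap.entrySet()) { sum += ...; }
        return ncMap.values().stream().mapToInt(NodeInfo::getNumAvailableCores).sum();
    }

    public static void main(String[] args) {
        Map<String, NodeInfo> ncMap = new TreeMap<>();
        ncMap.put("nc1", new NodeInfo(8));
        ncMap.put("nc2", new NodeInfo(4));
        System.out.println(totalCores(ncMap)); // 12
    }
}
```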
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
index 81e0cfd..8d028a5 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
@@ -50,9 +50,7 @@
ICcApplicationContext props = (ICcApplicationContext) ctx.get(ASTERIX_APP_CONTEXT_INFO_ATTR);
Map<String, String> buildProperties = props.getBuildProperties().getAllProps();
ObjectNode responseObject = OBJECT_MAPPER.createObjectNode();
- for (Map.Entry<String, String> e : buildProperties.entrySet()) {
- responseObject.put(e.getKey(), e.getValue());
- }
+ buildProperties.forEach(responseObject::put);
try {
HttpUtil.setContentType(response, HttpUtil.ContentType.TEXT_PLAIN, HttpUtil.Encoding.UTF8);
} catch (IOException e) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java
index 04e6313..a502de9 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java
@@ -499,7 +499,7 @@
int abortedJobId = txnContext.getJobId().getId();
// Obtain the first/last log record LSNs written by the Job
long firstLSN = txnContext.getFirstLSN();
- /**
+ /*
* The effect of any log record with LSN below minFirstLSN has already been written to disk and
* will not be rolled back. Therefore, we will set the first LSN of the job to the maximum of
* minFirstLSN and the job's first LSN.
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
index 80fdbd6..4ac1305 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
@@ -25,7 +25,6 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -67,7 +66,6 @@
import org.apache.asterix.util.FaultToleranceUtil;
import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
-import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public class AutoFaultToleranceStrategy implements IFaultToleranceStrategy {
@@ -163,9 +161,8 @@
LOGGER.info("Partitions to recover: " + lostPartitions);
}
//For each replica, send a request to takeover the assigned partitions
- for (Entry<String, List<Integer>> entry : partitionRecoveryPlan.entrySet()) {
- String replica = entry.getKey();
- Integer[] partitionsToTakeover = entry.getValue().toArray(new Integer[entry.getValue().size()]);
+ partitionRecoveryPlan.forEach((replica, value) -> {
+ Integer[] partitionsToTakeover = value.toArray(new Integer[value.size()]);
long requestId = clusterRequestId++;
TakeoverPartitionsRequestMessage takeoverRequest = new TakeoverPartitionsRequestMessage(requestId,
replica, partitionsToTakeover);
@@ -180,7 +177,7 @@
*/
LOGGER.log(Level.WARNING, "Failed to send takeover request: " + takeoverRequest, e);
}
- }
+ });
}
}
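Annotation: a capture subtlety worth noting in this hunk: `clusterRequestId++` now executes inside a lambda. That compiles only because `clusterRequestId` is a field of the enclosing class; lambdas may freely read and write instance fields, while captured local variables must be effectively final. A sketch under that assumption, with illustrative names:

```java
import java.util.Map;
import java.util.TreeMap;

public class FieldCaptureSketch {
    private long clusterRequestId = 0; // fields may be mutated from a lambda

    void sendAll(Map<String, Integer> plan) {
        plan.forEach((replica, partitions) -> {
            long requestId = clusterRequestId++; // fine: field access via 'this'
            System.out.println("request " + requestId + " -> " + replica);
        });
    }

    public static void main(String[] args) {
        Map<String, Integer> plan = new TreeMap<>();
        plan.put("nc1", 3);
        plan.put("nc2", 2);
        new FieldCaptureSketch().sendAll(plan);
    }
}
```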
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NodeFailbackPlan.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NodeFailbackPlan.java
index bc069b9..ab938d2 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NodeFailbackPlan.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NodeFailbackPlan.java
@@ -163,11 +163,11 @@
*/
for (String participant : participants) {
Set<Integer> partitionToPrepareForFailback = new HashSet<>();
- for (Map.Entry<Integer, String> entry : partition2nodeMap.entrySet()) {
- if (entry.getValue().equals(participant)) {
- partitionToPrepareForFailback.add(entry.getKey());
+ partition2nodeMap.forEach((key, value) -> {
+ if (value.equals(participant)) {
+ partitionToPrepareForFailback.add(key);
}
- }
+ });
PreparePartitionsFailbackRequestMessage msg = new PreparePartitionsFailbackRequestMessage(planId,
requestId++, participant, partitionToPrepareForFailback);
if (participant.equals(nodeToReleaseMetadataManager)) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index d3be23c..e45df8b 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -36,7 +36,6 @@
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutorService;
@@ -1981,9 +1980,7 @@
throw new AlgebricksException("Unable to read policy file" + cfps.getSourcePolicyFile(), e);
}
Map<String, String> policyProperties = new HashMap<>();
- for (Entry<Object, Object> entry : prop.entrySet()) {
- policyProperties.put((String) entry.getKey(), (String) entry.getValue());
- }
+ prop.forEach((key, value) -> policyProperties.put((String) key, (String) value));
newPolicy = new FeedPolicyEntity(dataverse, policy, description, policyProperties);
}
MetadataManager.INSTANCE.addFeedPolicy(mdTxnCtx, newPolicy);
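Annotation: because `java.util.Properties` extends `Hashtable<Object, Object>`, the lambda in this hunk still needs the `String` casts. When every entry is known to be a string property, `stringPropertyNames()` offers a cast-free alternative; shown here as an aside, not as what the patch does:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PropertiesSketch {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.setProperty("buffer.size", "32768");
        prop.setProperty("policy", "basic");

        // As in the patch: forEach over the raw Object/Object view, with casts.
        Map<String, String> viaCasts = new HashMap<>();
        prop.forEach((key, value) -> viaCasts.put((String) key, (String) value));

        // Cast-free alternative for pure string properties.
        Map<String, String> viaNames = new HashMap<>();
        for (String name : prop.stringPropertyNames()) {
            viaNames.put(name, prop.getProperty(name));
        }

        System.out.println(viaCasts.equals(viaNames)); // true
    }
}
```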
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
index 2b9386e..9cce91b 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
@@ -330,8 +330,7 @@
// copy connectors
connectorIdMapping.clear();
- for (Entry<ConnectorDescriptorId, IConnectorDescriptor> entry : subJob.getConnectorMap().entrySet()) {
- IConnectorDescriptor connDesc = entry.getValue();
+ subJob.getConnectorMap().forEach((key, connDesc) -> {
ConnectorDescriptorId newConnId;
if (connDesc instanceof MToNPartitioningConnectorDescriptor) {
MToNPartitioningConnectorDescriptor m2nConn = (MToNPartitioningConnectorDescriptor) connDesc;
@@ -341,8 +340,8 @@
} else {
newConnId = jobSpec.createConnectorDescriptor(connDesc);
}
- connectorIdMapping.put(entry.getKey(), newConnId);
- }
+ connectorIdMapping.put(key, newConnId);
+ });
// make connections between operators
for (Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>,
@@ -406,12 +405,12 @@
}
// set count constraints
- for (Entry<OperatorDescriptorId, Integer> entry : operatorCounts.entrySet()) {
- IOperatorDescriptor opDesc = jobSpec.getOperatorMap().get(entry.getKey());
- if (!operatorLocations.keySet().contains(entry.getKey())) {
- PartitionConstraintHelper.addPartitionCountConstraint(jobSpec, opDesc, entry.getValue());
+ operatorCounts.forEach((key, value) -> {
+ IOperatorDescriptor opDesc = jobSpec.getOperatorMap().get(key);
+ if (!operatorLocations.keySet().contains(key)) {
+ PartitionConstraintHelper.addPartitionCountConstraint(jobSpec, opDesc, value);
}
- }
+ });
// roots
for (OperatorDescriptorId root : subJob.getRoots()) {
jobSpec.addRoot(jobSpec.getOperatorMap().get(operatorIdMapping.get(root)));
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
index 559d57c..cdb7bb3 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
@@ -491,11 +491,8 @@
sb.append(String.format(idxHeaderFormat, "DatasetID", "ResourceID", "Open", "Reference Count", "Index"));
for (DatasetResource dsr : datasets.values()) {
DatasetInfo dsInfo = dsr.getDatasetInfo();
- for (Map.Entry<Long, IndexInfo> entry : dsInfo.getIndexes().entrySet()) {
- IndexInfo iInfo = entry.getValue();
- sb.append(String.format(idxFormat, dsInfo.getDatasetID(), entry.getKey(), iInfo.isOpen(),
- iInfo.getReferenceCount(), iInfo.getIndex()));
- }
+ dsInfo.getIndexes().forEach((key, iInfo) -> sb.append(String.format(idxFormat, dsInfo.getDatasetID(), key,
+ iInfo.isOpen(), iInfo.getReferenceCount(), iInfo.getIndex())));
}
outputStream.write(sb.toString().getBytes());
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java
index 870a6df..5212892 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java
@@ -23,6 +23,7 @@
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
@@ -80,14 +81,14 @@
*/
public String[] getLocationConstraints(InputSplit[] splits) throws HyracksException {
if (splits == null) {
- /** deal the case when the splits array is null */
+ /* deal with the case where the splits array is null */
return new String[] {};
}
int[] workloads = new int[NCs.length];
Arrays.fill(workloads, 0);
String[] locations = new String[splits.length];
Map<String, IntWritable> locationToNumOfSplits = new HashMap<String, IntWritable>();
- /**
+ /*
* upper bound is number of splits
*/
int upperBoundSlots = splits.length;
@@ -96,7 +97,7 @@
Random random = new Random(System.currentTimeMillis());
boolean scheduled[] = new boolean[splits.length];
Arrays.fill(scheduled, false);
- /**
+ /*
* scan the splits and build the popularity map
* give the machines with less local splits more scheduling priority
*/
@@ -105,7 +106,7 @@
for (String location : locationToNumOfSplits.keySet()) {
locationToNumOfAssignement.put(location, 0);
}
- /**
+ /*
* push data-local upper-bounds slots to each machine
*/
scheduleLocalSlots(splits, workloads, locations, upperBoundSlots, random, scheduled, locationToNumOfSplits,
@@ -119,7 +120,7 @@
}
LOGGER.info("Data local rate: "
+ (scheduled.length == 0 ? 0.0 : ((float) dataLocalCount / (float) (scheduled.length))));
- /**
+ /*
* push non-data-local upper-bounds slots to each machine
*/
locationToNumOfAssignement.clear();
@@ -158,22 +159,15 @@
boolean[] scheduled, final HashMap<String, Integer> locationToNumOfAssignement)
throws IOException, UnknownHostException {
- PriorityQueue<String> scheduleCadndiates = new PriorityQueue<String>(NCs.length, new Comparator<String>() {
- @Override
- public int compare(String s1, String s2) {
- return locationToNumOfAssignement.get(s1).compareTo(locationToNumOfAssignement.get(s2));
- }
+ PriorityQueue<String> scheduleCadndiates = new PriorityQueue<String>(NCs.length,
+ Comparator.comparing(locationToNumOfAssignement::get));
- });
-
- for (String nc : NCs) {
- scheduleCadndiates.add(nc);
- }
- /**
+ scheduleCadndiates.addAll(Arrays.asList(NCs));
+ /*
* schedule no-local file reads
*/
for (int i = 0; i < splits.length; i++) {
- /** if there is no data-local NC choice, choose a random one */
+ /* if there is no data-local NC choice, choose a random one */
if (!scheduled[i]) {
String selectedNcName = scheduleCadndiates.remove();
if (selectedNcName != null) {
@@ -209,55 +203,48 @@
private void scheduleLocalSlots(InputSplit[] splits, int[] workloads, String[] locations, int slots, Random random,
boolean[] scheduled, final Map<String, IntWritable> locationToNumSplits,
final HashMap<String, Integer> locationToNumOfAssignement) throws IOException, UnknownHostException {
- /** scheduling candidates will be ordered inversely according to their popularity */
- PriorityQueue<String> scheduleCadndiates = new PriorityQueue<String>(3, new Comparator<String>() {
- @Override
- public int compare(String s1, String s2) {
- int assignmentDifference = locationToNumOfAssignement.get(s1)
- .compareTo(locationToNumOfAssignement.get(s2));
- if (assignmentDifference != 0) {
- return assignmentDifference;
- }
- return locationToNumSplits.get(s1).compareTo(locationToNumSplits.get(s2));
+ /* scheduling candidates will be ordered inversely to their popularity */
+ PriorityQueue<String> scheduleCadndiates = new PriorityQueue<>(3, (s1, s2) -> {
+ int assignmentDifference = locationToNumOfAssignement.get(s1).compareTo(locationToNumOfAssignement.get(s2));
+ if (assignmentDifference != 0) {
+ return assignmentDifference;
}
-
+ return locationToNumSplits.get(s1).compareTo(locationToNumSplits.get(s2));
});
for (int i = 0; i < splits.length; i++) {
if (scheduled[i]) {
continue;
}
- /**
+ /*
* get the location of all the splits
*/
String[] locs = splits[i].getLocations();
if (locs.length > 0) {
scheduleCadndiates.clear();
- for (int j = 0; j < locs.length; j++) {
- scheduleCadndiates.add(locs[j]);
- }
+ Collections.addAll(scheduleCadndiates, locs);
for (String candidate : scheduleCadndiates) {
- /**
+ /*
* get all the IP addresses from the name
*/
InetAddress[] allIps = InetAddress.getAllByName(candidate);
- /**
+ /*
* iterate overa all ips
*/
for (InetAddress ip : allIps) {
- /**
+ /*
* if the node controller exists
*/
if (ipToNcMapping.get(ip.getHostAddress()) != null) {
- /**
+ /*
* set the ncs
*/
List<String> dataLocations = ipToNcMapping.get(ip.getHostAddress());
int arrayPos = random.nextInt(dataLocations.size());
String nc = dataLocations.get(arrayPos);
int pos = ncNameToIndex.get(nc);
- /**
+ /*
* check if the node is already full
*/
if (workloads[pos] < slots) {
@@ -270,11 +257,11 @@
}
}
}
- /**
+ /*
* break the loop for data-locations if the schedule has
* already been found
*/
- if (scheduled[i] == true) {
+ if (scheduled[i]) {
break;
}
}
@@ -319,23 +306,19 @@
ncNameToIndex.clear();
int i = 0;
- /**
+ /*
* build the IP address to NC map
*/
for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos.entrySet()) {
String ipAddr = InetAddress.getByAddress(entry.getValue().getNetworkAddress().lookupIpAddress())
.getHostAddress();
- List<String> matchedNCs = ipToNcMapping.get(ipAddr);
- if (matchedNCs == null) {
- matchedNCs = new ArrayList<String>();
- ipToNcMapping.put(ipAddr, matchedNCs);
- }
+ List<String> matchedNCs = ipToNcMapping.computeIfAbsent(ipAddr, k -> new ArrayList<>());
matchedNCs.add(entry.getKey());
NCs[i] = entry.getKey();
i++;
}
- /**
+ /*
* set up the NC name to index mapping
*/
for (i = 0; i < NCs.length; i++) {
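Annotation: two more SonarQube staples land in this file. The anonymous `Comparator` collapses to `Comparator.comparing(locationToNumOfAssignement::get)` (or a short two-parameter lambda where a tiebreak is needed), and the get/null-check/put idiom becomes `computeIfAbsent`. A sketch of both, with illustrative names and data:

```java
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;

public class SchedulerIdioms {
    public static void main(String[] args) {
        Map<String, Integer> assignments = new HashMap<>();
        assignments.put("nc1", 2);
        assignments.put("nc2", 0);

        // Anonymous Comparator -> key-extractor form; least-loaded NC first.
        PriorityQueue<String> candidates =
                new PriorityQueue<>(Comparator.comparing(assignments::get));
        candidates.addAll(assignments.keySet());
        System.out.println(candidates.peek()); // nc2

        // get + null check + put -> computeIfAbsent.
        Map<String, List<String>> ipToNcs = new HashMap<>();
        ipToNcs.computeIfAbsent("10.0.0.1", k -> new ArrayList<>()).add("nc1");
        ipToNcs.computeIfAbsent("10.0.0.1", k -> new ArrayList<>()).add("nc2");
        System.out.println(ipToNcs); // {10.0.0.1=[nc1, nc2]}
    }
}
```

Note that the queue copies nothing: if the counts change while elements are enqueued, the ordering goes stale. The surrounding scheduler code clears and repopulates the queue as it goes, which sidesteps this.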
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java
index 968dbf6..19a519f 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java
@@ -22,7 +22,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
@@ -49,9 +48,7 @@
public List<Pair<String, String>> getAllLibraries() {
ArrayList<Pair<String, String>> libs = new ArrayList<>();
synchronized (libraryClassLoaders) {
- for (Entry<String, ClassLoader> entry : libraryClassLoaders.entrySet()) {
- libs.add(getDataverseAndLibararyName(entry.getKey()));;
- }
+ libraryClassLoaders.forEach((key, value) -> libs.add(getDataverseAndLibararyName(key)));
}
return libs;
}
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
index 74020f3..3bbcf89 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
@@ -27,7 +27,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import org.apache.asterix.external.api.IRawRecord;
import org.apache.asterix.external.classad.CaseInsensitiveString;
@@ -215,8 +214,7 @@
parser.setLexerSource(lexerSource);
parser.parseNext(pAd);
Map<CaseInsensitiveString, ExprTree> attrs = pAd.getAttrList();
- for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
- ExprTree tree = entry.getValue();
+ attrs.forEach((key, tree) -> {
switch (tree.getKind()) {
case ATTRREF_NODE:
case CLASSAD_NODE:
@@ -231,7 +229,7 @@
System.out.println("Something is wrong");
break;
}
- }
+ });
}
} finally {
recordReader.close();
diff --git a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinMemory.java b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinMemory.java
index 9107f63..e42ba51 100644
--- a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinMemory.java
+++ b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinMemory.java
@@ -27,7 +27,6 @@
import java.util.Date;
import java.util.HashMap;
import java.util.List;
-import java.util.Map;
import org.apache.asterix.fuzzyjoin.invertedlist.InvertedListLengthList;
import org.apache.asterix.fuzzyjoin.invertedlist.InvertedListsLengthList;
@@ -176,9 +175,9 @@
// verify candidates
//
ArrayList<ResultJoin> results = new ArrayList<>();
- for (Map.Entry<Integer, Integer> cand : counts.entrySet()) {
- int count = cand.getValue();
- int indexProbe = cand.getKey();
+ counts.forEach((key, value) -> {
+ int count = value;
+ int indexProbe = key;
if (count > 0) {
int tokensProbe[] = records.get(indexProbe);
float similarity = similarityFilters.passSimilarityFilter(tokens, prefixLength, tokensProbe,
@@ -187,7 +186,7 @@
results.add(new ResultJoin(indexProbe, similarity));
}
}
- }
+ });
return results;
}
@@ -293,9 +292,9 @@
// verify candidates
//
ArrayList<ResultSelfJoin> results = new ArrayList<>();
- for (Map.Entry<Integer, Integer> cand : counts.entrySet()) {
- int count = cand.getValue();
- int indexProbe = cand.getKey();
+ counts.forEach((key, value) -> {
+ int count = value;
+ int indexProbe = key;
if (count > 0) {
int tokensProbe[] = records.get(indexProbe);
float similarity = similarityFilters.passSimilarityFilter(tokens, prefixLength, tokensProbe,
@@ -304,7 +303,7 @@
results.add(new ResultSelfJoin(index, indexProbe, similarity));
}
}
- }
+ });
return results;
}
}
diff --git a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java
index 62b34c0..64c7e52 100644
--- a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java
+++ b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java
@@ -30,7 +30,6 @@
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
-import java.util.Map;
import org.apache.asterix.fuzzyjoin.tokenizer.Tokenizer;
import org.apache.asterix.fuzzyjoin.tokenizer.TokenizerFactory;
@@ -95,9 +94,7 @@
input.close();
ArrayList<TokenCount> tokenCounts = new ArrayList<TokenCount>();
- for (Map.Entry<String, MutableInteger> entry : tokenCount.entrySet()) {
- tokenCounts.add(new TokenCount(entry.getKey(), entry.getValue()));
- }
+ tokenCount.forEach((key, value) -> tokenCounts.add(new TokenCount(key, value)));
Collections.sort(tokenCounts);
BufferedWriter outputTokens = new BufferedWriter(new FileWriter(tokensFileName));
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/rewrites/VariableSubstitutionEnvironment.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/rewrites/VariableSubstitutionEnvironment.java
index 26f528c..a8650dd 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/rewrites/VariableSubstitutionEnvironment.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/rewrites/VariableSubstitutionEnvironment.java
@@ -33,9 +33,7 @@
}
public VariableSubstitutionEnvironment(Map<VariableExpr, Expression> varExprMap) {
- for (Map.Entry<VariableExpr, Expression> entry : varExprMap.entrySet()) {
- oldVarToNewExpressionMap.put(entry.getKey().getVar().getValue(), entry.getValue());
- }
+ varExprMap.forEach((key, value) -> oldVarToNewExpressionMap.put(key.getVar().getValue(), value));
}
public VariableSubstitutionEnvironment(VariableSubstitutionEnvironment env) {
diff --git a/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/src/main/java/org/apache/asterix/extension/grammar/GrammarExtensionMojo.java b/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/src/main/java/org/apache/asterix/extension/grammar/GrammarExtensionMojo.java
index 4c750e2..17a71f2 100644
--- a/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/src/main/java/org/apache/asterix/extension/grammar/GrammarExtensionMojo.java
+++ b/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/src/main/java/org/apache/asterix/extension/grammar/GrammarExtensionMojo.java
@@ -216,7 +216,7 @@
writer.newLine();
writer.newLine();
- // Extinsibles
+ // Extensibles
for (Entry<String, Pair<String, String>> entry : extensibles.entrySet()) {
writer.newLine();
String signature = entry.getKey();
diff --git a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerNode.java b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerNode.java
index 4f24b1e..1820be6 100644
--- a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerNode.java
+++ b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerNode.java
@@ -40,12 +40,8 @@
public LexerNode clone() {
LexerNode node = new LexerNode();
node.finalTokenName = this.finalTokenName;
- for (Map.Entry<Rule, LexerNode> entry : this.actions.entrySet()) {
- node.actions.put(entry.getKey().clone(), entry.getValue().clone());
- }
- for (String ongoing : this.ongoingParsing) {
- node.ongoingParsing.add(ongoing);
- }
+ this.actions.forEach((key, value) -> node.actions.put(key.clone(), value.clone()));
+ node.ongoingParsing.addAll(this.ongoingParsing);
return node;
}
@@ -59,9 +55,7 @@
if (actions.size() == 0) {
add(newRule);
} else {
- for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
- action.getValue().append(newRule);
- }
+ actions.forEach((key, value) -> value.append(newRule));
if (actions.containsKey(new RuleEpsilon())) {
actions.remove(new RuleEpsilon());
add(newRule);
@@ -84,9 +78,7 @@
throw new Exception("Rule conflict between: " + this.finalTokenName + " and " + newNode.finalTokenName);
}
}
- for (String ongoing : newNode.ongoingParsing) {
- this.ongoingParsing.add(ongoing);
- }
+ this.ongoingParsing.addAll(newNode.ongoingParsing);
}
public void append(LexerNode node) throws Exception {
@@ -110,9 +102,7 @@
this.finalTokenName = name;
} else {
ongoingParsing.add(TOKEN_PREFIX + name);
- for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
- action.getValue().appendTokenName(name);
- }
+ actions.forEach((key, value) -> value.appendTokenName(name));
}
}
@@ -130,7 +120,7 @@
if (finalTokenName != null)
result.append("! ");
if (actions.size() == 1)
- result.append(actions.keySet().toArray()[0].toString() + actions.values().toArray()[0].toString());
+ result.append(actions.keySet().toArray()[0].toString()).append(actions.values().toArray()[0].toString());
if (actions.size() > 1) {
result.append(" ( ");
for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
@@ -146,7 +136,7 @@
}
public String toJava() {
- StringBuffer result = new StringBuffer();
+ StringBuilder result = new StringBuilder();
if (numberOfRuleChar() > 2) {
result.append(toJavaSingleCharRules());
result.append(toJavaComplexRules(false));
@@ -154,10 +144,10 @@
result.append(toJavaComplexRules(true));
}
if (this.finalTokenName != null) {
- result.append("return " + TOKEN_PREFIX + finalTokenName + ";\n");
+ result.append("return ").append(TOKEN_PREFIX).append(finalTokenName).append(";\n");
} else if (ongoingParsing != null) {
String ongoingParsingArgs = collectionJoin(ongoingParsing, ',');
- result.append("return parseError(" + ongoingParsingArgs + ");\n");
+ result.append("return parseError(").append(ongoingParsingArgs).append(");\n");
}
return result.toString();
}
@@ -172,12 +162,12 @@
}
private String toJavaSingleCharRules() {
- StringBuffer result = new StringBuffer();
+ StringBuilder result = new StringBuilder();
result.append("switch(currentChar){\n");
for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
if (action.getKey() instanceof RuleChar) {
RuleChar rule = (RuleChar) action.getKey();
- result.append("case '" + rule.expectedChar() + "':\n");
+ result.append("case '").append(rule.expectedChar()).append("':\n");
result.append(rule.javaAction()).append("\n");
result.append(action.getValue().toJava());
}
@@ -187,7 +177,7 @@
}
private String toJavaComplexRules(boolean all) {
- StringBuffer result = new StringBuffer();
+ StringBuilder result = new StringBuilder();
for (Map.Entry<Rule, LexerNode> action : actions.entrySet()) {
if (!all && action.getKey() instanceof RuleChar)
continue;
@@ -244,7 +234,7 @@
ongoingParsingArgs.append(token);
ongoingParsingArgs.append(c);
}
- if (ongoingParsing.size() > 0) {
+ if (!ongoingParsing.isEmpty()) {
ongoingParsingArgs.deleteCharAt(ongoingParsingArgs.length() - 1);
}
return ongoingParsingArgs.toString();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java
index 08ca03b..8f88ca7 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataCache.java
@@ -376,9 +376,7 @@
if (m == null) {
return retDatasets;
}
- for (Map.Entry<String, Dataset> entry : m.entrySet()) {
- retDatasets.add(entry.getValue());
- }
+ m.forEach((key, value) -> retDatasets.add(value));
return retDatasets;
}
}
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
index da45e42..126114b 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
@@ -509,7 +509,7 @@
i++;
}
- /**
+ /*
* establish log replication handshake
*/
ByteBuffer handshakeBuffer = ByteBuffer.allocate(ReplicationProtocol.REPLICATION_REQUEST_TYPE_SIZE)
@@ -563,7 +563,7 @@
LOGGER.info("TxnLogReplicator thread was terminated.");
}
- /**
+ /*
* End log replication handshake (by sending a dummy log with a single byte)
*/
ByteBuffer endLogRepHandshake = ByteBuffer.allocate(Integer.SIZE + 1).putInt(1).put((byte) 0);
@@ -602,7 +602,7 @@
}
}
- /**
+ /*
* Close log replication sockets
*/
ByteBuffer goodbyeBuffer = ReplicationProtocol.getGoodbyeBuffer();
@@ -914,7 +914,7 @@
//stop replication thread afters all jobs/logs have been processed
suspendReplication(false);
- /**
+ /*
* If this node has any remote replicas, it needs to inform them
* that it is shutting down.
*/
@@ -923,7 +923,7 @@
sendShutdownNotifiction();
}
- /**
+ /*
* If this node has any remote primary replicas, then it needs to wait
* until all of them send the shutdown notification.
*/
@@ -1051,7 +1051,7 @@
ReplicationProtocol.sendGoodbye(socketChannel);
}
- /**
+ /*
* 4. update the LSN_MAP for indexes that were not flushed
* to the current append LSN to indicate no operations happened
* since the checkpoint start.
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
index 7c7a050..df17987 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
@@ -97,10 +97,10 @@
Map<String, Set<String>> recoveryList = new HashMap<>();
//3. find best candidate to recover from per lost replica data
- for (Entry<String, Set<String>> entry : recoveryCandidates.entrySet()) {
+ recoveryCandidates.forEach((key, value) -> {
int winnerScore = -1;
String winner = "";
- for (String node : entry.getValue()) {
+ for (String node : value) {
int nodeScore = candidatesScore.get(node);
@@ -111,14 +111,14 @@
}
if (recoveryList.containsKey(winner)) {
- recoveryList.get(winner).add(entry.getKey());
+ recoveryList.get(winner).add(key);
} else {
Set<String> nodesToRecover = new HashSet<>();
- nodesToRecover.add(entry.getKey());
+ nodesToRecover.add(key);
recoveryList.put(winner, nodesToRecover);
}
- }
+ });
return recoveryList;
}
@@ -146,7 +146,7 @@
@Override
public void takeoverPartitons(Integer[] partitions) throws IOException, ACIDException {
- /**
+ /*
* TODO even though the takeover is always expected to succeed,
* in case of any failure during the takeover, the CC should be
* notified that the takeover failed.
@@ -227,7 +227,7 @@
(ReplicaResourcesManager) runtimeContext.getReplicaResourcesManager();
Map<String, ClusterPartition[]> nodePartitions = runtimeContext.getMetadataProperties().getNodePartitions();
- /**
+ /*
* for each lost partition, get the remaining files from replicas
* to complete the failback process.
*/
@@ -251,7 +251,7 @@
replicationManager.requestReplicaFiles(replicaId, partitionsToRecover, existingFiles);
}
} catch (IOException e) {
- /**
+ /*
* in case of failure during failback completion process we need to construct a new plan
* and get all the files from the start since the remote replicas will change in the new plan.
*/
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
index 6e55fd2..ac83b71 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
@@ -462,11 +462,11 @@
private void updateNodeConfig(String nodeId, Map<IOption, Object> configuration) {
ConfigManager configManager = ((ConfigManagerApplicationConfig) appCtx.getServiceContext().getAppConfig())
.getConfigManager();
- for (Map.Entry<IOption, Object> entry : configuration.entrySet()) {
- if (entry.getKey().section() == Section.NC) {
- configManager.set(nodeId, entry.getKey(), entry.getValue());
+ configuration.forEach((key, value) -> {
+ if (key.section() == Section.NC) {
+ configManager.set(nodeId, key, value);
}
- }
+ });
}
}
diff --git a/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/template/TemplateHelper.java b/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/template/TemplateHelper.java
index b3f26fd..d2f52dc 100644
--- a/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/template/TemplateHelper.java
+++ b/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/template/TemplateHelper.java
@@ -41,15 +41,15 @@
private TemplateHelper() {
registerReplacement(LoremIpsumReplacement.INSTANCE);
- StringBuffer pattern = null;
- for (Map.Entry<String, TemplateReplacement> entry : replacements.entrySet()) {
- if (pattern == null) {
- pattern = new StringBuffer("%(");
+ StringBuilder pattern = new StringBuilder();
+ replacements.forEach((key, value) -> {
+ if (pattern.length() == 0) {
+ pattern.append("%(");
} else {
pattern.append("|");
}
- pattern.append(entry.getKey());
- }
+ pattern.append(key);
+ });
pattern.append(")[^%]*%");
replacementPattern = Pattern.compile(pattern.toString());
}
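
Two more SonarQube items land in this constructor: StringBuffer (synchronized) gives way to StringBuilder, and the null-reference check becomes a length check so the builder can be created up front and captured by the lambda. A hedged sketch of the same alternation-building idiom with invented keys:

    import java.util.Map;
    import java.util.TreeMap;
    import java.util.regex.Pattern;

    public class PatternBuildSketch {
        public static void main(String[] args) {
            Map<String, String> replacements = new TreeMap<>();
            replacements.put("date", "...");
            replacements.put("lorem", "...");

            // Created up front, so the lambda tests length() == 0 instead of
            // null-checking the reference; the builder itself may be mutated
            // because only the captured reference must be effectively final.
            StringBuilder pattern = new StringBuilder();
            replacements.forEach((key, value) -> pattern.append(pattern.length() == 0 ? "%(" : "|").append(key));
            pattern.append(")[^%]*%");
            System.out.println(Pattern.compile(pattern.toString())); // %(date|lorem)[^%]*%
        }
    }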
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
index 85ad6b4..5666b48 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
@@ -34,7 +34,6 @@
import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import java.util.concurrent.ConcurrentHashMap;
@@ -128,9 +127,8 @@
.append(Character.LINE_SEPARATOR).append(ioManager.getClass().getSimpleName()).append(':')
.append(Character.LINE_SEPARATOR).append(ioManager.toString()).append(Character.LINE_SEPARATOR)
.append("Cached Resources:").append(Character.LINE_SEPARATOR);
- for (Entry<String, LocalResource> pair : resourceCache.asMap().entrySet()) {
- aString.append(pair.getKey()).append("->").append(pair.getValue()).append(Character.LINE_SEPARATOR);
- }
+ resourceCache.asMap().forEach(
+ (key, value) -> aString.append(key).append("->").append(value).append(Character.LINE_SEPARATOR));
return aString.toString();
}
diff --git a/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixApplicationMaster.java b/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixApplicationMaster.java
index 47cf3b3..4a900e1 100644
--- a/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixApplicationMaster.java
+++ b/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixApplicationMaster.java
@@ -230,10 +230,10 @@
LOG.info("Dump debug output");
Map<String, String> envs = System.getenv();
- for (Map.Entry<String, String> env : envs.entrySet()) {
- LOG.info("System env: key=" + env.getKey() + ", val=" + env.getValue());
- System.out.println("System env: key=" + env.getKey() + ", val=" + env.getValue());
- }
+ envs.forEach((key, value) -> {
+ LOG.info("System env: key=" + key + ", val=" + value);
+ System.out.println("System env: key=" + key + ", val=" + value);
+ });
String cmd = "ls -alhLR";
Runtime run = Runtime.getRuntime();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ConstantExpression.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ConstantExpression.java
index 8f55632..689d51c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ConstantExpression.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ConstantExpression.java
@@ -22,7 +22,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -172,10 +171,7 @@
@Override
public AbstractLogicalExpression cloneExpression() {
Map<Object, IExpressionAnnotation> m = new HashMap<>();
- for (Entry<Object, IExpressionAnnotation> entry : annotationMap.entrySet()) {
- IExpressionAnnotation annot2 = entry.getValue().copy();
- m.put(entry.getKey(), annot2);
- }
+ annotationMap.forEach((key, value1) -> m.put(key, value1.copy()));
ConstantExpression c = new ConstantExpression(value);
c.annotationMap = m;
return c;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
index 4843f81..f800be8 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
@@ -25,7 +25,6 @@
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.lang3.mutable.Mutable;
@@ -49,11 +48,11 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -662,26 +661,24 @@
// equivalent
// class should still be propagated (kept).
Set<LogicalVariable> usedVarSet = new HashSet<LogicalVariable>(usedVariables);
- for (Entry<LogicalVariable, EquivalenceClass> entry : chldClasses.entrySet()) {
- EquivalenceClass ec = entry.getValue();
+ chldClasses.forEach((key, ec) -> {
for (ILogicalExpression expr : ec.getExpressionMembers()) {
- Set<LogicalVariable> exprUsedVars = new HashSet<LogicalVariable>();
+ Set<LogicalVariable> exprUsedVars = new HashSet<>();
expr.getUsedVariables(exprUsedVars);
exprUsedVars.retainAll(usedVarSet);
// Check if the expression member uses a used variable.
if (!exprUsedVars.isEmpty()) {
- for (LogicalVariable v : ec.getMembers()) {
+ // If variable members contain a used variable, the representative variable should be a used
+ // variable.
+ ec.getMembers().forEach(v -> {
eqClasses.put(v, ec);
- // If variable members contain a used variable, the
- // representative
- // variable should be a used variable.
if (usedVarSet.contains(v)) {
ec.setVariableRepresentative(v);
}
- }
+ });
}
}
- }
+ });
List<FunctionalDependency> chldFds = getOrComputeFDs(op2, ctx);
for (FunctionalDependency fd : chldFds) {
@@ -786,8 +783,7 @@
LogicalVariable var = assignVars.get(assignVarIndex);
ILogicalExpression expr = assignExprs.get(assignVarIndex).getValue();
Map<LogicalVariable, EquivalenceClass> newVarEqcMap = new HashMap<LogicalVariable, EquivalenceClass>();
- for (Entry<LogicalVariable, EquivalenceClass> entry : eqClasses.entrySet()) {
- EquivalenceClass eqc = entry.getValue();
+ eqClasses.forEach((key, eqc) -> {
// If the equivalence class contains the right-hand-side
// expression,
// the left-hand-side variable is added into the equivalence
@@ -795,9 +791,9 @@
if (eqc.contains(expr)) {
eqc.addMember(var);
newVarEqcMap.put(var, eqc); // Add var as a map key for the
- // equivalence class.
+ // equivalence class.
}
- }
+ });
eqClasses.putAll(newVarEqcMap);
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
index b3b9da1..0c53685 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
@@ -23,6 +23,7 @@
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Objects;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -37,11 +38,11 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -633,15 +634,13 @@
variableMapping.clear();
IsomorphismUtilities.mapVariablesTopDown(op, argOp, variableMapping);
- List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
- if (argOp.getInputs().size() > 0) {
- for (int i = 0; i < argOp.getInputs().size(); i++) {
- VariableUtilities.getLiveVariables(argOp.getInputs().get(i).getValue(), liveVars);
- }
+ List<LogicalVariable> liveVars = new ArrayList<>();
+ for (int i = 0; i < argOp.getInputs().size(); i++) {
+ VariableUtilities.getLiveVariables(argOp.getInputs().get(i).getValue(), liveVars);
}
- List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
+ List<LogicalVariable> producedVars = new ArrayList<>();
VariableUtilities.getProducedVariables(argOp, producedVars);
- List<LogicalVariable> producedVarsNew = new ArrayList<LogicalVariable>();
+ List<LogicalVariable> producedVarsNew = new ArrayList<>();
VariableUtilities.getProducedVariables(op, producedVarsNew);
if (producedVars.size() != producedVarsNew.size()) {
@@ -671,14 +670,7 @@
}
private static boolean variableEqual(LogicalVariable var, LogicalVariable varArg) {
- if (var == null && varArg == null) {
- return true;
- }
- if (var.equals(varArg)) {
- return true;
- } else {
- return false;
- }
+ return Objects.equals(var, varArg);
}
@Override
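
The variableEqual rewrite fixes a latent bug as well as verbosity: the old code checked only the both-null case before calling var.equals, so a null var paired with a non-null varArg threw a NullPointerException. java.util.Objects.equals covers all four combinations. A standalone sketch, using String in place of LogicalVariable:

    import java.util.Objects;

    public class NullSafeEqualsSketch {
        static boolean variableEqualOld(String var, String varArg) {
            if (var == null && varArg == null) {
                return true;
            }
            return var.equals(varArg); // NPE when only var is null
        }

        static boolean variableEqualNew(String var, String varArg) {
            return Objects.equals(var, varArg); // handles every null combination
        }

        public static void main(String[] args) {
            System.out.println(variableEqualNew(null, "x")); // false
            try {
                variableEqualOld(null, "x");
            } catch (NullPointerException e) {
                System.out.println("old version throws");
            }
        }
    }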
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
index 52d8e64..2caa252 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
@@ -22,7 +22,6 @@
import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.lang3.mutable.Mutable;
@@ -34,17 +33,16 @@
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractAssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
@@ -435,11 +433,9 @@
}
private Mutable<ILogicalExpression> copyExpressionAndSubtituteVars(Mutable<ILogicalExpression> expr) {
- ILogicalExpression copy = ((AbstractLogicalExpression) expr.getValue()).cloneExpression();
- for (Entry<LogicalVariable, LogicalVariable> entry : variableMapping.entrySet()) {
- copy.substituteVar(entry.getKey(), entry.getValue());
- }
- return new MutableObject<ILogicalExpression>(copy);
+ ILogicalExpression copy = expr.getValue().cloneExpression();
+ variableMapping.forEach(copy::substituteVar);
+ return new MutableObject<>(copy);
}
private void mapVariablesForUnion(ILogicalOperator op, ILogicalOperator arg) {
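
When the whole loop body is a single two-argument call, the BiConsumer can shrink to a method reference, as in variableMapping.forEach(copy::substituteVar) here and sdr.getStateDump().forEach(result::put) further down. A toy illustration of why the shapes line up (Expr and its method are invented):

    import java.util.HashMap;
    import java.util.Map;

    public class MethodRefSketch {
        static class Expr {
            void substituteVar(String from, String to) {
                System.out.println("substitute " + from + " -> " + to);
            }
        }

        public static void main(String[] args) {
            Map<String, String> variableMapping = new HashMap<>();
            variableMapping.put("$v1", "$v9");

            Expr copy = new Expr();
            // A two-argument instance method on a captured receiver has exactly
            // the BiConsumer<K, V> shape that Map.forEach expects.
            variableMapping.forEach(copy::substituteVar);
        }
    }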
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalExpressionDeepCopyWithNewVariablesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalExpressionDeepCopyWithNewVariablesVisitor.java
index cef387a..24c5162 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalExpressionDeepCopyWithNewVariablesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalExpressionDeepCopyWithNewVariablesVisitor.java
@@ -65,10 +65,7 @@
private void deepCopyAnnotations(AbstractFunctionCallExpression src, AbstractFunctionCallExpression dest) {
Map<Object, IExpressionAnnotation> srcAnnotations = src.getAnnotations();
Map<Object, IExpressionAnnotation> destAnnotations = dest.getAnnotations();
- for (Map.Entry<Object, IExpressionAnnotation> annotationEntry : srcAnnotations.entrySet()) {
- IExpressionAnnotation annotation = annotationEntry.getValue().copy();
- destAnnotations.put(annotationEntry.getKey(), annotation);
- }
+ srcAnnotations.forEach((key, value) -> destAnnotations.put(key, value.copy()));
}
private void deepCopyOpaqueParameters(AbstractFunctionCallExpression src, AbstractFunctionCallExpression dest) {
@@ -85,7 +82,7 @@
public MutableObject<ILogicalExpression> deepCopyExpressionReference(Mutable<ILogicalExpression> exprRef)
throws AlgebricksException {
- return new MutableObject<ILogicalExpression>(deepCopy(exprRef.getValue()));
+ return new MutableObject<>(deepCopy(exprRef.getValue()));
}
// TODO return List<...>
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
index 934577f..a75e7d6 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
@@ -22,7 +22,6 @@
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
-import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.mutable.Mutable;
@@ -54,6 +53,7 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
@@ -65,7 +65,6 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
import org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
import org.apache.hyracks.algebricks.core.algebra.typing.ITypingContext;
@@ -179,7 +178,7 @@
private Mutable<ILogicalOperator> deepCopyOperatorReference(Mutable<ILogicalOperator> opRef, ILogicalOperator arg)
throws AlgebricksException {
- return new MutableObject<ILogicalOperator>(deepCopy(opRef.getValue(), arg));
+ return new MutableObject<>(deepCopy(opRef.getValue(), arg));
}
private List<Mutable<ILogicalOperator>> deepCopyOperatorReferenceList(List<Mutable<ILogicalOperator>> list,
@@ -280,18 +279,18 @@
}
public void updatePrimaryKeys(IOptimizationContext context) {
- for (Map.Entry<LogicalVariable, LogicalVariable> entry : inputVarToOutputVarMapping.entrySet()) {
- List<LogicalVariable> primaryKey = context.findPrimaryKey(entry.getKey());
+ inputVarToOutputVarMapping.forEach((key, value) -> {
+ List<LogicalVariable> primaryKey = context.findPrimaryKey(key);
if (primaryKey != null) {
- List<LogicalVariable> head = new ArrayList<LogicalVariable>();
+ List<LogicalVariable> head = new ArrayList<>();
for (LogicalVariable variable : primaryKey) {
head.add(inputVarToOutputVarMapping.get(variable));
}
- List<LogicalVariable> tail = new ArrayList<LogicalVariable>(1);
- tail.add(entry.getValue());
+ List<LogicalVariable> tail = new ArrayList<>(1);
+ tail.add(value);
context.addPrimaryKey(new FunctionalDependency(head, tail));
}
- }
+ });
}
public LogicalVariable varCopy(LogicalVariable var) throws AlgebricksException {
@@ -398,7 +397,7 @@
public ILogicalOperator visitNestedTupleSourceOperator(NestedTupleSourceOperator op, ILogicalOperator arg)
throws AlgebricksException {
Mutable<ILogicalOperator> dataSourceReference = arg == null ? op.getDataSourceReference()
- : new MutableObject<ILogicalOperator>(arg);
+ : new MutableObject<>(arg);
NestedTupleSourceOperator opCopy = new NestedTupleSourceOperator(dataSourceReference);
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
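
Several hunks in this visitor only tighten generics: new MutableObject<ILogicalOperator>(...) becomes new MutableObject<>(...). A minimal sketch of the Java 7 diamond operator on ordinary collection types:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class DiamondSketch {
        public static void main(String[] args) {
            // Pre-Java-7 style: type arguments repeated on the right-hand side.
            Map<String, List<Integer>> before = new HashMap<String, List<Integer>>();
            before.put("k", new ArrayList<Integer>());

            // Diamond operator: the compiler infers the arguments.
            Map<String, List<Integer>> after = new HashMap<>();
            after.put("k", new ArrayList<>());

            System.out.println(before.equals(after)); // true -- identical behavior
        }
    }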
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java
index 641557c..f59638c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/UnorderedPartitionedProperty.java
@@ -70,11 +70,11 @@
@Override
public void substituteColumnVars(Map<LogicalVariable, LogicalVariable> varMap) {
- for (Map.Entry<LogicalVariable, LogicalVariable> var : varMap.entrySet()) {
- if (columnSet.remove(var.getKey())) {
- columnSet.add(var.getValue());
+ varMap.forEach((key, value) -> {
+ if (columnSet.remove(key)) {
+ columnSet.add(value);
}
- }
+ });
}
}
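
substituteColumnVars illustrates the main safety question when converting loops to forEach: the lambda may freely mutate other collections (columnSet here), but structurally modifying the map being iterated would typically fail. A small sketch:

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class ForEachMutationSketch {
        public static void main(String[] args) {
            Map<String, String> varMap = new HashMap<>();
            varMap.put("$a", "$x");

            Set<String> columnSet = new HashSet<>();
            columnSet.add("$a");

            // Mutating a different collection inside forEach is safe; only
            // structural modification of the iterated map itself risks a
            // ConcurrentModificationException.
            varMap.forEach((key, value) -> {
                if (columnSet.remove(key)) {
                    columnSet.add(value);
                }
            });
            System.out.println(columnSet); // [$x]
        }
    }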
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
index 2960903..f817cd6 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
@@ -22,7 +22,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
@@ -98,13 +97,10 @@
}
private void reviseEdges(IHyracksJobBuilder builder) {
- /**
+ /*
* revise the edges for the case of replicate operator
*/
- for (Entry<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> entry : operatorVisitedToParents
- .entrySet()) {
- Mutable<ILogicalOperator> child = entry.getKey();
- List<Mutable<ILogicalOperator>> parents = entry.getValue();
+ operatorVisitedToParents.forEach((child, parents) -> {
if (parents.size() > 1) {
if (child.getValue().getOperatorTag() == LogicalOperatorTag.REPLICATE
|| child.getValue().getOperatorTag() == LogicalOperatorTag.SPLIT) {
@@ -113,7 +109,8 @@
// make the order of the graph edges consistent with the order of rop's outputs
List<Mutable<ILogicalOperator>> outputs = rop.getOutputs();
for (Mutable<ILogicalOperator> parent : parents) {
- builder.contributeGraphEdge(child.getValue(), outputs.indexOf(parent), parent.getValue(), 0);
+ builder.contributeGraphEdge(child.getValue(), outputs.indexOf(parent), parent.getValue(),
+ 0);
}
} else {
int i = 0;
@@ -124,6 +121,6 @@
}
}
}
- }
+ });
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
index 1bbda27..9c452bf 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
@@ -22,7 +22,6 @@
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
-import java.util.Map;
import java.util.Set;
public class DotFormatBuilder {
@@ -129,9 +128,7 @@
public String toString() {
StringBuilder nodeString = new StringBuilder();
nodeString.append(nodeId).append(" [");
- for (Map.Entry attribute : attributes.entrySet()) {
- nodeString.append(attribute.getKey()).append("=").append(attribute.getValue()).append(",");
- }
+ attributes.forEach((key, value) -> nodeString.append(key).append("=").append(value).append(","));
// remove last ","
if (nodeString.charAt(nodeString.length() - 1) == ',') {
nodeString.deleteCharAt(nodeString.length() - 1);
@@ -186,9 +183,7 @@
public String toString() {
StringBuilder edgeString = new StringBuilder();
edgeString.append(source.getNodeId()).append("->").append(destination.getNodeId()).append(" [");
- for (Map.Entry attribute : attributes.entrySet()) {
- edgeString.append(attribute.getKey()).append("=").append(attribute.getValue()).append(",");
- }
+ attributes.forEach((key, value) -> edgeString.append(key).append("=").append(value).append(","));
// remove last ","
if (edgeString.charAt(edgeString.length() - 1) == ',') {
edgeString.deleteCharAt(edgeString.length() - 1);
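
Besides brevity, the DotFormatBuilder conversion fixes a typing wart: the old loops declared the raw type Map.Entry, so getKey()/getValue() returned Object, while forEach keeps the map's declared parameter types. A self-contained sketch of the append-then-trim pattern (attribute names invented):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class RawEntrySketch {
        public static void main(String[] args) {
            Map<String, String> attributes = new LinkedHashMap<>();
            attributes.put("label", "scan");
            attributes.put("color", "blue");

            StringBuilder nodeString = new StringBuilder("node [");
            // forEach keeps the declared String types; the old raw Map.Entry
            // loop handed back Object for both key and value.
            attributes.forEach((key, value) -> nodeString.append(key).append('=').append(value).append(','));
            // remove last ","
            nodeString.deleteCharAt(nodeString.length() - 1).append(']');
            System.out.println(nodeString); // node [label=scan,color=blue]
        }
    }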
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
index 1c2b9b5..084626e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
@@ -103,12 +103,8 @@
public void finish() {
Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> caMap = jag
.getConnectorActivityMap();
- for (Map.Entry<ConnectorDescriptorId, Pair<IActivity, Integer>> e : connectorProducerMap.entrySet()) {
- ConnectorDescriptorId cdId = e.getKey();
- Pair<IActivity, Integer> producer = e.getValue();
- Pair<IActivity, Integer> consumer = connectorConsumerMap.get(cdId);
- caMap.put(cdId, Pair.of(producer, consumer));
- }
+ connectorProducerMap
+ .forEach((cdId, producer) -> caMap.put(cdId, Pair.of(producer, connectorConsumerMap.get(cdId))));
}
private <K, V> void addToValueSet(Map<K, Set<V>> map, K n1, V n2) {
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
index 2cf96c2..9f66080 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
@@ -309,11 +309,7 @@
}
private <K, V> void insertIntoIndexedMap(Map<K, List<V>> map, K key, int index, V value) {
- List<V> vList = map.get(key);
- if (vList == null) {
- vList = new ArrayList<>();
- map.put(key, vList);
- }
+ List<V> vList = map.computeIfAbsent(key, k -> new ArrayList<>());
extend(vList, index);
vList.set(index, value);
}
@@ -322,9 +318,9 @@
public String toString() {
StringBuilder buffer = new StringBuilder();
- for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
- buffer.append(e.getKey().getId()).append(" : ").append(e.getValue().toString()).append("\n");
- List<IConnectorDescriptor> inputs = opInputMap.get(e.getKey());
+ opMap.forEach((key, value) -> {
+ buffer.append(key.getId()).append(" : ").append(value.toString()).append("\n");
+ List<IConnectorDescriptor> inputs = opInputMap.get(key);
if (inputs != null && !inputs.isEmpty()) {
buffer.append(" Inputs:\n");
for (IConnectorDescriptor c : inputs) {
@@ -332,7 +328,7 @@
.append("\n");
}
}
- List<IConnectorDescriptor> outputs = opOutputMap.get(e.getKey());
+ List<IConnectorDescriptor> outputs = opOutputMap.get(key);
if (outputs != null && !outputs.isEmpty()) {
buffer.append(" Outputs:\n");
for (IConnectorDescriptor c : outputs) {
@@ -340,7 +336,7 @@
.append("\n");
}
}
- }
+ });
buffer.append("\n").append("Constraints:\n").append(userConstraints);
@@ -352,8 +348,8 @@
ObjectNode jjob = om.createObjectNode();
ArrayNode jopArray = om.createArrayNode();
- for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
- ObjectNode op = e.getValue().toJSON();
+ opMap.forEach((key, value) -> {
+ ObjectNode op = value.toJSON();
if (!userConstraints.isEmpty()) {
// Add operator partition constraints to each JSON operator.
ObjectNode pcObject = om.createObjectNode();
@@ -364,12 +360,12 @@
ExpressionTag tag = constraint.getLValue().getTag();
if (tag == ExpressionTag.PARTITION_COUNT) {
PartitionCountExpression pce = (PartitionCountExpression) constraint.getLValue();
- if (e.getKey() == pce.getOperatorDescriptorId()) {
+ if (key == pce.getOperatorDescriptorId()) {
pcObject.put("count", getConstraintExpressionRValue(constraint));
}
} else if (tag == ExpressionTag.PARTITION_LOCATION) {
PartitionLocationExpression ple = (PartitionLocationExpression) constraint.getLValue();
- if (e.getKey() == ple.getOperatorDescriptorId()) {
+ if (key == ple.getOperatorDescriptorId()) {
pleObject.put(Integer.toString(ple.getPartition()),
getConstraintExpressionRValue(constraint));
}
@@ -383,23 +379,23 @@
}
}
jopArray.add(op);
- }
+ });
jjob.set("operators", jopArray);
ArrayNode jcArray = om.createArrayNode();
- for (Map.Entry<ConnectorDescriptorId, IConnectorDescriptor> e : connMap.entrySet()) {
+ connMap.forEach((key, value) -> {
ObjectNode conn = om.createObjectNode();
Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connection =
- connectorOpMap.get(e.getKey());
+ connectorOpMap.get(key);
if (connection != null) {
conn.put("in-operator-id", connection.getLeft().getLeft().getOperatorId().toString());
conn.put("in-operator-port", connection.getLeft().getRight().intValue());
conn.put("out-operator-id", connection.getRight().getLeft().getOperatorId().toString());
conn.put("out-operator-port", connection.getRight().getRight().intValue());
}
- conn.set("connector", e.getValue().toJSON());
+ conn.set("connector", value.toJSON());
jcArray.add(conn);
- }
+ });
jjob.set("connectors", jcArray);
return jjob;
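
insertIntoIndexedMap shows the other idiom this change leans on: Map.computeIfAbsent collapses the get/null-check/put sequence into one call that creates and registers the value on a miss. A minimal before/after sketch with invented keys:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ComputeIfAbsentSketch {
        public static void main(String[] args) {
            Map<String, List<Integer>> map = new HashMap<>();

            // Before: fetch, test for null, create, register.
            List<Integer> vList = map.get("op1");
            if (vList == null) {
                vList = new ArrayList<>();
                map.put("op1", vList);
            }
            vList.add(1);

            // After: one call creates and registers the list on a miss.
            map.computeIfAbsent("op2", k -> new ArrayList<>()).add(2);

            System.out.println(map); // {op1=[1], op2=[2]} (order may vary)
        }
    }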
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
index 20f128d..7cdb300 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
@@ -29,7 +29,6 @@
import java.util.Set;
import org.apache.commons.lang3.tuple.Pair;
-
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
import org.apache.hyracks.api.dataflow.IActivity;
@@ -37,7 +36,6 @@
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.job.ActivityCluster;
import org.apache.hyracks.api.job.ActivityClusterGraph;
-import org.apache.hyracks.api.job.ActivityClusterId;
import org.apache.hyracks.api.rewriter.runtime.SuperActivity;
/**
@@ -65,12 +63,10 @@
acg.getActivityMap().clear();
acg.getConnectorMap().clear();
Map<IActivity, SuperActivity> invertedActivitySuperActivityMap = new HashMap<>();
- for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
- rewriteIntraActivityCluster(entry.getValue(), invertedActivitySuperActivityMap);
- }
- for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
- rewriteInterActivityCluster(entry.getValue(), invertedActivitySuperActivityMap);
- }
+ acg.getActivityClusterMap()
+ .forEach((key, value) -> rewriteIntraActivityCluster(value, invertedActivitySuperActivityMap));
+ acg.getActivityClusterMap()
+ .forEach((key, value) -> rewriteInterActivityCluster(value, invertedActivitySuperActivityMap));
invertedActivitySuperActivityMap.clear();
}
@@ -84,14 +80,11 @@
Map<IActivity, SuperActivity> invertedActivitySuperActivityMap) {
Map<ActivityId, Set<ActivityId>> blocked2BlockerMap = ac.getBlocked2BlockerMap();
Map<ActivityId, ActivityId> invertedAid2SuperAidMap = new HashMap<>();
- for (Entry<IActivity, SuperActivity> entry : invertedActivitySuperActivityMap.entrySet()) {
- invertedAid2SuperAidMap.put(entry.getKey().getActivityId(), entry.getValue().getActivityId());
- }
+ invertedActivitySuperActivityMap
+ .forEach((key, value) -> invertedAid2SuperAidMap.put(key.getActivityId(), value.getActivityId()));
Map<ActivityId, Set<ActivityId>> replacedBlocked2BlockerMap = new HashMap<>();
- for (Entry<ActivityId, Set<ActivityId>> entry : blocked2BlockerMap.entrySet()) {
- ActivityId blocked = entry.getKey();
+ blocked2BlockerMap.forEach((blocked, blockers) -> {
ActivityId replacedBlocked = invertedAid2SuperAidMap.get(blocked);
- Set<ActivityId> blockers = entry.getValue();
Set<ActivityId> replacedBlockers = null;
if (blockers != null) {
replacedBlockers = new HashSet<>();
@@ -113,7 +106,7 @@
replacedBlocked2BlockerMap.put(replacedBlocked, existingBlockers);
}
}
- }
+ });
blocked2BlockerMap.clear();
blocked2BlockerMap.putAll(replacedBlocked2BlockerMap);
}
@@ -136,23 +129,21 @@
Map<ActivityId, SuperActivity> superActivities = new HashMap<>();
Map<ActivityId, Queue<IActivity>> toBeExpendedMap = new HashMap<>();
- /**
+ /*
* Build the initial super activities
*/
- for (Entry<ActivityId, IActivity> entry : activities.entrySet()) {
- ActivityId activityId = entry.getKey();
- IActivity activity = entry.getValue();
+ activities.forEach((activityId, activity) -> {
if (activityInputMap.get(activityId) == null) {
startActivities.put(activityId, activity);
- /**
+ /*
* use the start activity's id as the id of the super activity
*/
createNewSuperActivity(ac, superActivities, toBeExpendedMap, invertedActivitySuperActivityMap,
activityId, activity);
}
- }
+ });
- /**
+ /*
* expand one-to-one connected activity clusters in BFS order.
* after the while-loop, the original activities are partitioned
* into equivalence classes, one per super activity.
@@ -165,19 +156,19 @@
ActivityId superActivityId = entry.getKey();
SuperActivity superActivity = entry.getValue();
- /**
+ /*
* for the case where the super activity has already been swallowed
*/
if (superActivities.get(superActivityId) == null) {
continue;
}
- /**
+ /*
* expand the super activity
*/
Queue<IActivity> toBeExpended = toBeExpendedMap.get(superActivityId);
if (toBeExpended == null) {
- /**
+ /*
* Nothing to expand
*/
continue;
@@ -191,7 +182,7 @@
IActivity newActivity = endPoints.getRight().getLeft();
SuperActivity existingSuperActivity = invertedActivitySuperActivityMap.get(newActivity);
if (outputConn.getClass().getName().contains(ONE_TO_ONE_CONNECTOR)) {
- /**
+ /*
* expand the super activity cluster on a one-to-one out-bound connection
*/
if (existingSuperActivity == null) {
@@ -199,13 +190,13 @@
toBeExpended.add(newActivity);
invertedActivitySuperActivityMap.put(newActivity, superActivity);
} else {
- /**
+ /*
* the two activities already in the same super activity
*/
if (existingSuperActivity == superActivity) {
continue;
}
- /**
+ /*
* swallow an existing super activity
*/
swallowExistingSuperActivity(superActivities, toBeExpendedMap,
@@ -214,7 +205,7 @@
}
} else {
if (existingSuperActivity == null) {
- /**
+ /*
* create a new super activity
*/
createNewSuperActivity(ac, superActivities, toBeExpendedMap,
@@ -224,10 +215,10 @@
}
}
- /**
+ /*
* remove the to-be-expended queue if it is empty
*/
- if (toBeExpended.size() == 0) {
+ if (toBeExpended.isEmpty()) {
toBeExpendedMap.remove(superActivityId);
}
}
@@ -237,28 +228,25 @@
Map<ConnectorDescriptorId, RecordDescriptor> connRecordDesc = ac.getConnectorRecordDescriptorMap();
Map<SuperActivity, Integer> superActivityProducerPort = new HashMap<>();
Map<SuperActivity, Integer> superActivityConsumerPort = new HashMap<>();
- for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
- superActivityProducerPort.put(entry.getValue(), 0);
- superActivityConsumerPort.put(entry.getValue(), 0);
- }
+ superActivities.forEach((key, value) -> {
+ superActivityProducerPort.put(value, 0);
+ superActivityConsumerPort.put(value, 0);
+ });
- /**
+ /*
* create a new activity cluster to replace the old activity cluster
*/
ActivityCluster newActivityCluster = new ActivityCluster(acg, ac.getId());
newActivityCluster.setConnectorPolicyAssignmentPolicy(ac.getConnectorPolicyAssignmentPolicy());
- for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
- newActivityCluster.addActivity(entry.getValue());
- acg.getActivityMap().put(entry.getKey(), newActivityCluster);
- }
+ superActivities.forEach((key, value) -> {
+ newActivityCluster.addActivity(value);
+ acg.getActivityMap().put(key, newActivityCluster);
+ });
- /**
+ /*
* Setup connectors: either inside a super activity or among super activities
*/
- for (Entry<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> entry : connectorActivityMap
- .entrySet()) {
- ConnectorDescriptorId connectorId = entry.getKey();
- Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> endPoints = entry.getValue();
+ connectorActivityMap.forEach((connectorId, endPoints) -> {
IActivity producerActivity = endPoints.getLeft().getLeft();
IActivity consumerActivity = endPoints.getRight().getLeft();
int producerPort = endPoints.getLeft().getRight();
@@ -266,14 +254,14 @@
RecordDescriptor recordDescriptor = connRecordDesc.get(connectorId);
IConnectorDescriptor conn = connMap.get(connectorId);
if (conn.getClass().getName().contains(ONE_TO_ONE_CONNECTOR)) {
- /**
+ /*
* connection edge between inner activities
*/
SuperActivity residingSuperActivity = invertedActivitySuperActivityMap.get(producerActivity);
residingSuperActivity.connect(conn, producerActivity, producerPort, consumerActivity, consumerPort,
recordDescriptor);
} else {
- /**
+ /*
* connection edge between super activities
*/
SuperActivity producerSuperActivity = invertedActivitySuperActivityMap.get(producerActivity);
@@ -284,7 +272,7 @@
newActivityCluster.connect(conn, producerSuperActivity, producerSAPort, consumerSuperActivity,
consumerSAPort, recordDescriptor);
- /**
+ /*
* bridge the port
*/
producerSuperActivity.setClusterOutputIndex(producerSAPort, producerActivity.getActivityId(),
@@ -293,30 +281,30 @@
consumerPort);
acg.getConnectorMap().put(connectorId, newActivityCluster);
- /**
+ /*
* increasing the port number for the producer and consumer
*/
superActivityProducerPort.put(producerSuperActivity, ++producerSAPort);
superActivityConsumerPort.put(consumerSuperActivity, ++consumerSAPort);
}
- }
+ });
- /**
+ /*
* Set up the roots of the new activity cluster
*/
- for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
- List<IConnectorDescriptor> connIds = newActivityCluster.getActivityOutputMap().get(entry.getKey());
+ superActivities.forEach((key, value) -> {
+ List<IConnectorDescriptor> connIds = newActivityCluster.getActivityOutputMap().get(key);
if (connIds == null || connIds.isEmpty()) {
- newActivityCluster.addRoot(entry.getValue());
+ newActivityCluster.addRoot(value);
}
- }
+ });
- /**
+ /*
* set up the blocked2Blocker mapping, which will be updated in the rewriteInterActivityCluster call
*/
newActivityCluster.getBlocked2BlockerMap().putAll(ac.getBlocked2BlockerMap());
- /**
+ /*
* replace the old activity cluster with the new activity cluster
*/
acg.getActivityClusterMap().put(ac.getId(), newActivityCluster);
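
Most of this file's hunks simply demote /** to /*: the starred Javadoc form in statement position is a common SonarQube finding because Javadoc is meant to document declarations, and size() == 0 likewise gives way to isEmpty(). A small sketch of both conventions:

    import java.util.ArrayDeque;
    import java.util.Queue;

    public class CommentStyleSketch {
        /**
         * Javadoc (the starred form removed by these hunks) belongs on
         * declarations like this method, where tools actually pick it up.
         */
        public static void main(String[] args) {
            Queue<String> toBeExpanded = new ArrayDeque<>();
            /*
             * Inside a method body a plain block comment is the right form;
             * that is the entire content of most hunks in this file.
             */
            if (toBeExpanded.isEmpty()) { // preferred over size() == 0
                System.out.println("nothing to expand");
            }
        }
    }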
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java
index 476a744..ddcfd78 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java
@@ -59,17 +59,17 @@
throws HyracksDataException {
final Map<ActivityId, IActivity> startActivities = new HashMap<>();
Map<ActivityId, IActivity> activities = getActivityMap();
- for (Entry<ActivityId, IActivity> entry : activities.entrySet()) {
- /**
+ activities.forEach((key, value) -> {
+ /*
* extract start activities
*/
- List<IConnectorDescriptor> conns = getActivityInputMap().get(entry.getKey());
+ List<IConnectorDescriptor> conns = getActivityInputMap().get(key);
if (conns == null || conns.isEmpty()) {
- startActivities.put(entry.getKey(), entry.getValue());
+ startActivities.put(key, value);
}
- }
+ });
- /**
+ /*
* wrap a RecordDescriptorProvider for the super activity
*/
IRecordDescriptorProvider wrappedRecDescProvider = new IRecordDescriptorProvider() {
@@ -77,7 +77,7 @@
@Override
public RecordDescriptor getInputRecordDescriptor(ActivityId aid, int inputIndex) {
if (startActivities.get(aid) != null) {
- /**
+ /*
* if the activity is a start (input boundary) activity
*/
int superActivityInputChannel = SuperActivity.this.getClusterInputIndex(Pair.of(aid, inputIndex));
@@ -86,14 +86,14 @@
}
}
if (SuperActivity.this.getActivityMap().get(aid) != null) {
- /**
+ /*
* if the activity is an internal activity of the super activity
*/
IConnectorDescriptor conn = getActivityInputMap().get(aid).get(inputIndex);
return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
}
- /**
+ /*
* the following is for the case where the activity is in other SuperActivities
*/
ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();
@@ -121,7 +121,7 @@
@Override
public RecordDescriptor getOutputRecordDescriptor(ActivityId aid, int outputIndex) {
- /**
+ /*
* if the activity is an output-boundary activity
*/
int superActivityOutputChannel = SuperActivity.this.getClusterOutputIndex(Pair.of(aid, outputIndex));
@@ -130,14 +130,14 @@
}
if (SuperActivity.this.getActivityMap().get(aid) != null) {
- /**
+ /*
* if the activity is an internal activity of the super activity
*/
IConnectorDescriptor conn = getActivityOutputMap().get(aid).get(outputIndex);
return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
}
- /**
+ /*
* the following is for the case where the activity is in other SuperActivities
*/
ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
index 285e932..e9491f3 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
@@ -56,9 +56,9 @@
Properties prop = new Properties();
Map<Integer, String> errorMessageMap = new HashMap<>();
prop.load(resourceStream);
- for (Map.Entry<Object, Object> entry : prop.entrySet()) {
- String key = (String) entry.getKey();
- String msg = (String) entry.getValue();
+ prop.forEach((key1, value) -> {
+ String key = (String) key1;
+ String msg = (String) value;
if (key.contains(COMMA)) {
String[] codes = key.split(COMMA);
for (String code : codes) {
@@ -67,7 +67,7 @@
} else {
errorMessageMap.put(Integer.parseInt(key), msg);
}
- }
+ });
return errorMessageMap;
}
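
loadErrorMap shows a wrinkle when the source is a java.util.Properties: it implements Map<Object, Object>, so the lambda parameters arrive as Object and the String casts survive the refactoring. A hedged sketch (the comma handling is collapsed into one split relative to the original):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Properties;

    public class PropertiesForEachSketch {
        public static void main(String[] args) {
            Properties prop = new Properties();
            prop.setProperty("1,2", "generic error");
            prop.setProperty("42", "answer");

            Map<Integer, String> errorMessageMap = new HashMap<>();
            // Properties implements Map<Object, Object>, so key and value arrive
            // as Object and must still be cast, exactly as in loadErrorMap.
            prop.forEach((key1, value) -> {
                String key = (String) key1;
                String msg = (String) value;
                for (String code : key.split(",")) {
                    errorMessageMap.put(Integer.parseInt(code), msg);
                }
            });
            System.out.println(errorMessageMap); // maps 1, 2 and 42 to their messages
        }
    }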
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
index 66f7f10..e5eec11 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
@@ -22,7 +22,6 @@
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedHashMap;
-import java.util.Map;
public class ExecutionTimeProfiler {
@@ -67,9 +66,7 @@
try {
synchronized (lock1) {
sb.append("\n\n");
- for (Map.Entry<String, String> entry : spentTimePerJobMap.get(jobSignature).entrySet()) {
- sb.append(entry.getValue());
- }
+ spentTimePerJobMap.get(jobSignature).forEach((key, value) -> sb.append(value));
fos.write(sb.toString().getBytes());
fos.flush();
spentTimePerJobMap.get(jobSignature).clear();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
index a3fbb70..7b99df2 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
@@ -260,9 +260,9 @@
}
private void connectNCs() {
- getNCServices().entrySet().forEach(ncService -> {
+ getNCServices().forEach((key, value) -> {
final TriggerNCWork triggerWork = new TriggerNCWork(ClusterControllerService.this,
- ncService.getValue().getLeft(), ncService.getValue().getRight(), ncService.getKey());
+ value.getLeft(), value.getRight(), key);
executor.submit(triggerWork);
});
serviceCtx.addClusterLifecycleListener(new IClusterLifecycleListener() {
@@ -289,10 +289,9 @@
private void terminateNCServices() throws Exception {
List<ShutdownNCServiceWork> shutdownNCServiceWorks = new ArrayList<>();
- getNCServices().entrySet().forEach(ncService -> {
- if (ncService.getValue().getRight() != NCConfig.NCSERVICE_PORT_DISABLED) {
- ShutdownNCServiceWork shutdownWork = new ShutdownNCServiceWork(ncService.getValue().getLeft(),
- ncService.getValue().getRight(), ncService.getKey());
+ getNCServices().forEach((key, value) -> {
+ if (value.getRight() != NCConfig.NCSERVICE_PORT_DISABLED) {
+ ShutdownNCServiceWork shutdownWork = new ShutdownNCServiceWork(value.getLeft(), value.getRight(), key);
workQueue.schedule(shutdownWork);
shutdownNCServiceWorks.add(shutdownWork);
}
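
connectNCs and terminateNCServices were already lambda-based but iterated via entrySet().forEach; Map.forEach drops the getKey()/getValue() indirection. A minimal sketch with an invented node/port map:

    import java.util.HashMap;
    import java.util.Map;

    public class EntrySetForEachSketch {
        public static void main(String[] args) {
            Map<String, Integer> ncServices = new HashMap<>();
            ncServices.put("nc1", 9090);

            // Before: a lambda over entries still needs getKey()/getValue().
            ncServices.entrySet().forEach(e -> System.out.println(e.getKey() + ":" + e.getValue()));

            // After: Map.forEach destructures each entry into two parameters.
            ncServices.forEach((node, port) -> System.out.println(node + ":" + port));
        }
    }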
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
index a380967..3cd6235 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
@@ -135,11 +135,9 @@
@Override
public Map<String, NodeControllerInfo> getNodeControllerInfoMap() {
Map<String, NodeControllerInfo> result = new LinkedHashMap<>();
- for (Map.Entry<String, NodeControllerState> e : nodeRegistry.entrySet()) {
- NodeControllerState ncState = e.getValue();
- result.put(e.getKey(), new NodeControllerInfo(e.getKey(), NodeStatus.ALIVE, ncState.getDataPort(),
- ncState.getDatasetPort(), ncState.getMessagingPort(), ncState.getCapacity().getCores()));
- }
+ nodeRegistry.forEach(
+ (key, ncState) -> result.put(key, new NodeControllerInfo(key, NodeStatus.ALIVE, ncState.getDataPort(),
+ ncState.getDatasetPort(), ncState.getMessagingPort(), ncState.getCapacity().getCores())));
return result;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
index 57b8c50..04166a4 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
@@ -242,15 +242,15 @@
JobRun jobRun = executor.getJobRun();
Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies = jobRun.getConnectorPolicyMap();
- for (Map.Entry<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> e : taskConnectivity.entrySet()) {
- Set<TaskId> cluster = taskClusterMap.get(e.getKey());
- for (Pair<TaskId, ConnectorDescriptorId> p : e.getValue()) {
+ taskConnectivity.forEach((key, value) -> {
+ Set<TaskId> cluster = taskClusterMap.get(key);
+ for (Pair<TaskId, ConnectorDescriptorId> p : value) {
IConnectorPolicy cPolicy = connectorPolicies.get(p.getRight());
if (cPolicy.requiresProducerConsumerCoscheduling()) {
cluster.add(p.getLeft());
}
}
- }
+ });
/*
* We compute the transitive closure of this (producer-consumer) relation to find the largest set of
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
index a3078b6..8a69a6f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
@@ -65,8 +65,8 @@
import org.apache.hyracks.control.cc.work.JobCleanupWork;
import org.apache.hyracks.control.common.job.PartitionState;
import org.apache.hyracks.control.common.job.TaskAttemptDescriptor;
-import org.apache.hyracks.control.common.work.NoOpCallback;
import org.apache.hyracks.control.common.work.IResultCallback;
+import org.apache.hyracks.control.common.work.NoOpCallback;
public class JobExecutor {
private static final Logger LOGGER = Logger.getLogger(JobExecutor.class.getName());
@@ -379,14 +379,13 @@
tcAttempt.initializePendingTaskCounter();
tcAttempts.add(tcAttempt);
- /**
+ /*
* Improvement for reducing master/slave message communications, for each TaskAttemptDescriptor,
* we set the NetworkAddress[][] partitionLocations, in which each row is for an incoming connector descriptor
* and each column is for an input channel of the connector.
*/
INodeManager nodeManager = ccs.getNodeManager();
- for (Map.Entry<String, List<TaskAttemptDescriptor>> e : taskAttemptMap.entrySet()) {
- List<TaskAttemptDescriptor> tads = e.getValue();
+ taskAttemptMap.forEach((key, tads) -> {
for (TaskAttemptDescriptor tad : tads) {
TaskAttemptId taid = tad.getTaskAttemptId();
int attempt = taid.getAttempt();
@@ -401,7 +400,7 @@
for (int i = 0; i < inPartitionCounts.length; ++i) {
ConnectorDescriptorId cdId = inConnectors.get(i).getConnectorId();
IConnectorPolicy policy = jobRun.getConnectorPolicyMap().get(cdId);
- /**
+ /*
* carry sender location information into a task
* unless this is a re-attempt and the send-side
* is materialized blocking.
@@ -419,7 +418,7 @@
}
tad.setInputPartitionLocations(partitionLocations);
}
- }
+ });
tcAttempt.setStatus(TaskClusterAttempt.TaskClusterStatus.RUNNING);
tcAttempt.setStartTime(System.currentTimeMillis());
@@ -560,12 +559,11 @@
final JobId jobId = jobRun.getJobId();
LOGGER.info("Abort map for job: " + jobId + ": " + abortTaskAttemptMap);
INodeManager nodeManager = ccs.getNodeManager();
- for (Map.Entry<String, List<TaskAttemptId>> entry : abortTaskAttemptMap.entrySet()) {
- final NodeControllerState node = nodeManager.getNodeControllerState(entry.getKey());
- final List<TaskAttemptId> abortTaskAttempts = entry.getValue();
+ abortTaskAttemptMap.forEach((key, abortTaskAttempts) -> {
+ final NodeControllerState node = nodeManager.getNodeControllerState(key);
if (node != null) {
if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Aborting: " + abortTaskAttempts + " at " + entry.getKey());
+ LOGGER.info("Aborting: " + abortTaskAttempts + " at " + key);
}
try {
node.getNodeController().abortTasks(jobId, abortTaskAttempts);
@@ -573,7 +571,7 @@
LOGGER.log(Level.SEVERE, e.getMessage(), e);
}
}
- }
+ });
inProgressTaskClusters.remove(tcAttempt.getTaskCluster());
TaskCluster tc = tcAttempt.getTaskCluster();
PartitionMatchMaker pmm = jobRun.getPartitionMatchMaker();
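
abortTaskCluster highlights a limit of the conversion: BiConsumer.accept declares no checked exceptions and there is no break or continue across entries, so the try/catch around abortTasks stays inside the lambda. A sketch (abort and the node name are invented):

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;

    public class CheckedExceptionSketch {
        private static void abort(String node) throws IOException {
            throw new IOException("cannot reach " + node);
        }

        public static void main(String[] args) {
            Map<String, Integer> attempts = new HashMap<>();
            attempts.put("nc1", 2);

            // BiConsumer.accept declares no checked exceptions, so they must be
            // handled inside the lambda body -- there is no way to rethrow them
            // to the caller without wrapping.
            attempts.forEach((node, count) -> {
                try {
                    abort(node);
                } catch (IOException e) {
                    System.out.println("handled inside the lambda: " + e.getMessage());
                }
            });
        }
    }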
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
index 95a6d9b..ef0bca2 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
@@ -284,10 +284,9 @@
ObjectNode planJSON = om.createObjectNode();
ArrayNode acTasks = om.createArrayNode();
- for (Map.Entry<ActivityId, ActivityPlan> e : acp.getActivityPlanMap().entrySet()) {
- ActivityPlan acPlan = e.getValue();
+ acp.getActivityPlanMap().forEach((key, acPlan) -> {
ObjectNode entry = om.createObjectNode();
- entry.put("activity-id", e.getKey().toString());
+ entry.put("activity-id", key.toString());
ActivityPartitionDetails apd = acPlan.getActivityPartitionDetails();
entry.put("partition-count", apd.getPartitionCount());
@@ -319,21 +318,21 @@
ArrayNode dependentTasksJSON = om.createArrayNode();
for (TaskId dependent : t.getDependents()) {
dependentTasksJSON.add(dependent.toString());
- task.set("dependents", dependentTasksJSON);
+ task.set("dependents", dependentTasksJSON);
- ArrayNode dependencyTasksJSON = om.createArrayNode();
- for (TaskId dependency : t.getDependencies()) {
- dependencyTasksJSON.add(dependency.toString());
+ ArrayNode dependencyTasksJSON = om.createArrayNode();
+ for (TaskId dependency : t.getDependencies()) {
+ dependencyTasksJSON.add(dependency.toString());
+ }
+ task.set("dependencies", dependencyTasksJSON);
+
+ tasks.add(task);
}
- task.set("dependencies", dependencyTasksJSON);
+ entry.set("tasks", tasks);
- tasks.add(task);
+ acTasks.add(entry);
}
- entry.set("tasks", tasks);
-
- acTasks.add(entry);
- }
- }
+ });
planJSON.set("activities", acTasks);
ArrayNode tClusters = om.createArrayNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
index ee22768..3c26f84 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
@@ -18,16 +18,14 @@
*/
package org.apache.hyracks.control.cc.web;
-import java.util.Map;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.web.util.IJSONOutputFunction;
import org.apache.hyracks.control.cc.work.GatherStateDumpsWork;
import org.apache.hyracks.control.cc.work.GatherStateDumpsWork.StateDumpRun;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
public class StateDumpRESTAPIFunction implements IJSONOutputFunction {
private final ClusterControllerService ccs;
@@ -44,9 +42,7 @@
ObjectMapper om = new ObjectMapper();
ObjectNode result = om.createObjectNode();
- for (Map.Entry<String, String> e : sdr.getStateDump().entrySet()) {
- result.put(e.getKey(), e.getValue());
- }
+ sdr.getStateDump().forEach(result::put);
return result;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
index d13b5e6..67738ba 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
@@ -344,20 +344,20 @@
private void applyDefaults() {
LOGGER.fine("applying defaults");
- for (Map.Entry<Section, Map<String, IOption>> entry : sectionMap.entrySet()) {
- if (entry.getKey() == Section.NC) {
- entry.getValue().values().forEach(option -> getNodeNames()
+ sectionMap.forEach((key, value) -> {
+ if (key == Section.NC) {
+ value.values().forEach(option -> getNodeNames()
.forEach(node -> getOrDefault(getNodeEffectiveMap(node), option, node)));
for (Map.Entry<String, Map<IOption, Object>> nodeMap : nodeSpecificMap.entrySet()) {
- entry.getValue().values()
+ value.values()
.forEach(option -> getOrDefault(
new CompositeMap<>(nodeMap.getValue(), definedMap, new NoOpMapMutator()), option,
nodeMap.getKey()));
}
} else {
- entry.getValue().values().forEach(option -> getOrDefault(configurationMap, option, null));
+ value.values().forEach(option -> getOrDefault(configurationMap, option, null));
}
- }
+ });
}
private Object getOrDefault(Map<IOption, Object> map, IOption option, String nodeId) {
@@ -450,15 +450,13 @@
public Ini toIni(boolean includeDefaults) {
Ini ini = new Ini();
- for (Map.Entry<IOption, Object> entry : (includeDefaults ? configurationMap : definedMap).entrySet()) {
- if (entry.getValue() != null) {
- final IOption option = entry.getKey();
- ini.add(option.section().sectionName(), option.ini(), option.type().serializeToIni(entry.getValue()));
+ (includeDefaults ? configurationMap : definedMap).forEach((option, value) -> {
+ if (value != null) {
+ ini.add(option.section().sectionName(), option.ini(), option.type().serializeToIni(value));
}
- }
- for (Map.Entry<String, Map<IOption, Object>> nodeMapEntry : nodeSpecificMap.entrySet()) {
- String section = Section.NC.sectionName() + "/" + nodeMapEntry.getKey();
- final Map<IOption, Object> nodeValueMap = nodeMapEntry.getValue();
+ });
+ nodeSpecificMap.forEach((key, nodeValueMap) -> {
+ String section = Section.NC.sectionName() + "/" + key;
synchronized (nodeValueMap) {
for (Map.Entry<IOption, Object> entry : nodeValueMap.entrySet()) {
if (entry.getValue() != null) {
@@ -467,7 +465,7 @@
}
}
}
- }
+ });
return ini;
}
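toIni() now picks the source map with a ternary and walks it once with forEach. One detail worth keeping in mind: forEach visits null values like any others, so the null guard from the original loop has to survive inside the lambda. A reduced sketch, with invented option names:

    import java.util.HashMap;
    import java.util.Map;

    public class ToIniSketch {
        static void dump(Map<String, Object> defined, Map<String, Object> all, boolean includeDefaults) {
            // The ternary selects the map once; the guard skips unset options.
            (includeDefaults ? all : defined).forEach((option, value) -> {
                if (value != null) {
                    System.out.println(option + " = " + value);
                }
            });
        }

        public static void main(String[] args) {
            Map<String, Object> all = new HashMap<>();
            all.put("nc.port", 9090);
            all.put("nc.address", null); // skipped by the guard
            dump(new HashMap<>(), all, true);
        }
    }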
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
index d1d33a5..90dfc8c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
@@ -26,19 +26,19 @@
import java.util.Map;
import java.util.Map.Entry;
+import org.apache.hyracks.api.io.IWritable;
+
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.hyracks.api.io.IWritable;
-
public abstract class AbstractProfile implements IWritable, Serializable {
private static final long serialVersionUID = 1L;
protected Map<String, Long> counters;
public AbstractProfile() {
- counters = new HashMap<String, Long>();
+ counters = new HashMap<>();
}
public Map<String, Long> getCounters() {
@@ -50,12 +50,12 @@
protected void populateCounters(ObjectNode jo) {
ObjectMapper om = new ObjectMapper();
ArrayNode countersObj = om.createArrayNode();
- for (Map.Entry<String, Long> e : counters.entrySet()) {
+ counters.forEach((key, value) -> {
ObjectNode jpe = om.createObjectNode();
- jpe.put("name", e.getKey());
- jpe.put("value", e.getValue());
+ jpe.put("name", key);
+ jpe.put("value", value);
countersObj.add(jpe);
- }
+ });
jo.set("counters", countersObj);
}
@@ -75,7 +75,7 @@
@Override
public void readFields(DataInput input) throws IOException {
int size = input.readInt();
- counters = new HashMap<String, Long>();
+ counters = new HashMap<>();
for (int i = 0; i < size; i++) {
String key = input.readUTF();
long value = input.readLong();
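The HashMap changes in this file apply the Java 7 diamond operator: the compiler infers the type arguments from the declaration, so repeating them on the right-hand side is noise. For example:

    import java.util.HashMap;
    import java.util.Map;

    public class DiamondSketch {
        public static void main(String[] args) {
            Map<String, Long> verbose = new HashMap<String, Long>(); // pre-Java-7 style
            Map<String, Long> concise = new HashMap<>();             // diamond, same type
            System.out.println(verbose.equals(concise));             // true: both empty
        }
    }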
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
index 79a5538..64d074b 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
@@ -25,10 +25,11 @@
import java.util.Map;
import java.util.Map.Entry;
+import org.apache.hyracks.api.job.JobId;
+
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.hyracks.api.job.JobId;
public class JobProfile extends AbstractProfile {
private static final long serialVersionUID = 1L;
@@ -49,7 +50,7 @@
public JobProfile(JobId jobId) {
this.jobId = jobId;
- jobletProfiles = new HashMap<String, JobletProfile>();
+ jobletProfiles = new HashMap<>();
}
public JobId getJobId() {
@@ -91,7 +92,7 @@
public void readFields(DataInput input) throws IOException {
jobId = JobId.create(input);
int size = input.readInt();
- jobletProfiles = new HashMap<String, JobletProfile>();
+ jobletProfiles = new HashMap<>();
for (int i = 0; i < size; i++) {
String key = input.readUTF();
JobletProfile value = JobletProfile.create(input);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
index c3792df..5bdb1b5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
@@ -25,11 +25,11 @@
import java.util.Map;
import java.util.Map.Entry;
+import org.apache.hyracks.api.dataflow.TaskAttemptId;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.hyracks.api.dataflow.TaskAttemptId;
public class JobletProfile extends AbstractProfile {
private static final long serialVersionUID = 1L;
@@ -50,7 +50,7 @@
public JobletProfile(String nodeId) {
this.nodeId = nodeId;
- taskProfiles = new HashMap<TaskAttemptId, TaskProfile>();
+ taskProfiles = new HashMap<>();
}
public String getNodeId() {
@@ -67,7 +67,7 @@
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
- json.put("node-id", nodeId.toString());
+ json.put("node-id", nodeId);
populateCounters(json);
ArrayNode tasks = om.createArrayNode();
for (TaskProfile p : taskProfiles.values()) {
@@ -94,7 +94,7 @@
super.readFields(input);
nodeId = input.readUTF();
int size = input.readInt();
- taskProfiles = new HashMap<TaskAttemptId, TaskProfile>();
+ taskProfiles = new HashMap<>();
for (int i = 0; i < size; i++) {
TaskAttemptId key = TaskAttemptId.create(input);
TaskProfile value = TaskProfile.create(input);
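The node-id change removes a redundant conversion: nodeId is already a String, so nodeId.toString() was a no-op that static analysis flags. A sketch, assuming jackson-databind is on the classpath:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    public class NodeIdSketch {
        public static void main(String[] args) {
            String nodeId = "nc1"; // already a String; calling toString() adds nothing
            ObjectNode json = new ObjectMapper().createObjectNode();
            json.put("node-id", nodeId);
            System.out.println(json); // {"node-id":"nc1"}
        }
    }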
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
index 6dc9619..66a5e0f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
@@ -186,9 +186,7 @@
public void dumpProfile(JobletProfile jProfile) {
Map<String, Long> counters = jProfile.getCounters();
- for (Map.Entry<String, Counter> e : counterMap.entrySet()) {
- counters.put(e.getKey(), e.getValue().get());
- }
+ counterMap.forEach((key, value) -> counters.put(key, value.get()));
for (Task task : taskMap.values()) {
TaskProfile taskProfile = new TaskProfile(task.getTaskAttemptId(),
new Hashtable<>(task.getPartitionSendProfile()), new StatsCollector());
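Here the value has to be unwrapped via Counter.get() before insertion, so no method reference applies and the two-argument lambda is the natural form. A sketch that substitutes java.util.concurrent.atomic.AtomicLong for the Hyracks-internal Counter type:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.atomic.AtomicLong;

    public class CounterDumpSketch {
        public static void main(String[] args) {
            Map<String, AtomicLong> counterMap = new HashMap<>();
            counterMap.put("frames.sent", new AtomicLong(42));

            Map<String, Long> counters = new HashMap<>();
            // Each counter is unwrapped to its current long value before storing.
            counterMap.forEach((name, counter) -> counters.put(name, counter.get()));
            System.out.println(counters); // {frames.sent=42}
        }
    }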
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
index c58a2fa..11148a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
@@ -35,9 +35,9 @@
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import org.apache.hyracks.util.IntSerDeUtils;
import org.apache.hyracks.dataflow.std.sort.Utility;
import org.apache.hyracks.dataflow.std.structures.TuplePointer;
+import org.apache.hyracks.util.IntSerDeUtils;
public abstract class AbstractTupleMemoryManagerTest {
ISerializerDeserializer[] fieldsSerDer = new ISerializerDeserializer[] {
@@ -51,13 +51,13 @@
protected void assertEachTupleInFTAIsInBuffer(Map<Integer, Integer> map, Map<TuplePointer, Integer> mapInserted) {
ITuplePointerAccessor accessor = getTuplePointerAccessor();
- for (Map.Entry<TuplePointer, Integer> entry : mapInserted.entrySet()) {
- accessor.reset(entry.getKey());
- int dataLength = map.get(entry.getValue());
- assertEquals((int) entry.getValue(),
+ mapInserted.forEach((key, value) -> {
+ accessor.reset(key);
+ int dataLength = map.get(value);
+ assertEquals((int) value,
IntSerDeUtils.getInt(accessor.getBuffer().array(), accessor.getAbsFieldStartOffset(0)));
assertEquals(dataLength, accessor.getTupleLength());
- }
+ });
assertEquals(map.size(), mapInserted.size());
}
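Assertions keep working inside the forEach lambda: AssertionError is unchecked, so a failure propagates out of forEach and fails the test exactly as before. The only constraint is that captured locals such as accessor and map must be effectively final, which they already are here. A reduced sketch without the JUnit dependency:

    import java.util.HashMap;
    import java.util.Map;

    public class LambdaAssertSketch {
        public static void main(String[] args) {
            Map<String, Integer> expected = new HashMap<>();
            expected.put("a", 1);
            Map<String, Integer> actual = new HashMap<>(expected);

            // An AssertionError thrown in the lambda escapes forEach normally.
            expected.forEach((key, value) -> {
                if (!value.equals(actual.get(key))) {
                    throw new AssertionError("mismatch at " + key);
                }
            });
            System.out.println("all entries match");
        }
    }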
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
index 521dff1..b0c210f 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
@@ -130,15 +130,15 @@
@Override
public void open() throws HyracksDataException {
- for (Map.Entry<Integer, String> keyValue : keyValueMap.entrySet()) {
- Result result = answer.get(keyValue.getValue());
+ keyValueMap.forEach((key, value) -> {
+ Result result = answer.get(value);
if (result == null) {
- answer.put(keyValue.getValue(), new Result(keyValue.getKey()));
+ answer.put(value, new Result(key));
} else {
- result.sum += keyValue.getKey();
+ result.sum += key;
result.count++;
}
- }
+ });
}
@Override
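The open() body is a group-by-accumulate with an explicit null check. The same upsert can also be written with Map.compute; the patch keeps the explicit form, so the following is only an alternative sketch, with Result reduced to the two fields the test uses:

    import java.util.HashMap;
    import java.util.Map;

    public class GroupBySketch {
        static final class Result {
            long sum;
            int count = 1;
            Result(int first) { sum = first; }
        }

        public static void main(String[] args) {
            Map<Integer, String> keyValueMap = new HashMap<>();
            keyValueMap.put(1, "a");
            keyValueMap.put(2, "a");
            keyValueMap.put(3, "b");

            Map<String, Result> answer = new HashMap<>();
            keyValueMap.forEach((key, value) -> answer.compute(value, (group, r) -> {
                if (r == null) {
                    return new Result(key); // first member of this group
                }
                r.sum += key;
                r.count++;
                return r;
            }));
            answer.forEach((group, r) ->
                    System.out.println(group + ": sum=" + r.sum + ", count=" + r.count));
        }
    }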
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
index 15d11b0..4937a15 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
@@ -20,7 +20,6 @@
import java.net.InetAddress;
import java.util.ArrayList;
-import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -30,7 +29,6 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.InputSplit;
-
import org.apache.hyracks.api.client.NodeControllerInfo;
import org.apache.hyracks.api.topology.ClusterTopology;
import org.apache.hyracks.hdfs.api.INcCollection;
@@ -51,41 +49,32 @@
final int[] workloads, final int slotLimit) {
try {
final Map<List<Integer>, List<String>> pathToNCs = new HashMap<List<Integer>, List<String>>();
- for (int i = 0; i < NCs.length; i++) {
- List<Integer> path = new ArrayList<Integer>();
+ for (String NC : NCs) {
+ List<Integer> path = new ArrayList<>();
String ipAddress = InetAddress.getByAddress(
- ncNameToNcInfos.get(NCs[i]).getNetworkAddress().lookupIpAddress()).getHostAddress();
+ ncNameToNcInfos.get(NC).getNetworkAddress().lookupIpAddress()).getHostAddress();
topology.lookupNetworkTerminal(ipAddress, path);
- if (path.size() <= 0) {
+ if (path.isEmpty()) {
// if the hyracks nc is not in the defined cluster
path.add(Integer.MIN_VALUE);
- LOGGER.info(NCs[i] + "'s IP address is not in the cluster topology file!");
+ LOGGER.info(NC + "'s IP address is not in the cluster topology file!");
}
- List<String> ncs = pathToNCs.get(path);
- if (ncs == null) {
- ncs = new ArrayList<String>();
- pathToNCs.put(path, ncs);
- }
- ncs.add(NCs[i]);
+ List<String> ncs = pathToNCs.computeIfAbsent(path, k -> new ArrayList<>());
+ ncs.add(NC);
}
final TreeMap<List<Integer>, IntWritable> availableIpsToSlots = new TreeMap<List<Integer>, IntWritable>(
- new Comparator<List<Integer>>() {
-
- @Override
- public int compare(List<Integer> l1, List<Integer> l2) {
- int commonLength = Math.min(l1.size(), l2.size());
- for (int i = 0; i < commonLength; i++) {
- Integer value1 = l1.get(i);
- Integer value2 = l2.get(i);
- int cmp = value1 > value2 ? 1 : (value1 < value2 ? -1 : 0);
- if (cmp != 0) {
- return cmp;
- }
+ (l1, l2) -> {
+ int commonLength = Math.min(l1.size(), l2.size());
+ for (int i = 0; i < commonLength; i++) {
+ int value1 = l1.get(i);
+ int value2 = l2.get(i);
+ int cmp = Integer.compare(value1, value2);
+ if (cmp != 0) {
+ return cmp;
}
- return l1.size() > l2.size() ? 1 : (l1.size() < l2.size() ? -1 : 0);
}
-
+ return Integer.compare(l1.size(), l2.size());
});
for (int i = 0; i < workloads.length; i++) {
if (workloads[i] < slotLimit) {
@@ -93,7 +82,7 @@
String ipAddress = InetAddress.getByAddress(
ncNameToNcInfos.get(NCs[i]).getNetworkAddress().lookupIpAddress()).getHostAddress();
topology.lookupNetworkTerminal(ipAddress, path);
- if (path.size() <= 0) {
+ if (path.isEmpty()) {
// if the hyracks nc is not in the defined cluster
path.add(Integer.MIN_VALUE);
}
@@ -115,41 +104,38 @@
int minDistance = Integer.MAX_VALUE;
List<Integer> currentCandidatePath = null;
if (locs == null || locs.length > 0) {
- for (int j = 0; j < locs.length; j++) {
- /**
+ for (String loc : locs) {
+ /*
* get all the IP addresses from the name
*/
- InetAddress[] allIps = InetAddress.getAllByName(locs[j]);
+ InetAddress[] allIps = InetAddress.getAllByName(loc);
boolean inTopology = false;
for (InetAddress ip : allIps) {
- List<Integer> splitPath = new ArrayList<Integer>();
+ List<Integer> splitPath = new ArrayList<>();
boolean inCluster = topology.lookupNetworkTerminal(ip.getHostAddress(), splitPath);
if (!inCluster) {
continue;
}
inTopology = true;
- /**
+ /*
* if the node controller exists
*/
List<Integer> candidatePath = availableIpsToSlots.floorKey(splitPath);
if (candidatePath == null) {
candidatePath = availableIpsToSlots.ceilingKey(splitPath);
}
- if (candidatePath != null) {
- if (availableIpsToSlots.get(candidatePath).get() > 0) {
- int distance = distance(splitPath, candidatePath);
- if (minDistance > distance) {
- minDistance = distance;
- currentCandidatePath = candidatePath;
- }
+ if (candidatePath != null && availableIpsToSlots.get(candidatePath).get() > 0) {
+ int distance = distance(splitPath, candidatePath);
+ if (minDistance > distance) {
+ minDistance = distance;
+ currentCandidatePath = candidatePath;
}
-
}
}
if (!inTopology) {
- LOGGER.info(locs[j] + "'s IP address is not in the cluster topology file!");
- /**
+ LOGGER.info(loc + "'s IP address is not in the cluster topology file!");
+ /*
* if the machine is not in the topology file
*/
List<Integer> candidatePath = null;
@@ -159,11 +145,9 @@
break;
}
}
- /** the split path is empty */
- if (candidatePath != null) {
- if (availableIpsToSlots.get(candidatePath).get() > 0) {
- currentCandidatePath = candidatePath;
- }
+ /* the split path is empty */
+ if (candidatePath != null && availableIpsToSlots.get(candidatePath).get() > 0) {
+ currentCandidatePath = candidatePath;
}
}
}
@@ -176,8 +160,8 @@
}
}
- if (currentCandidatePath != null && currentCandidatePath.size() > 0) {
- /**
+ if (currentCandidatePath != null && !currentCandidatePath.isEmpty()) {
+ /*
* Update the entry of the selected IP
*/
IntWritable availableSlot = availableIpsToSlots.get(currentCandidatePath);
@@ -185,7 +169,7 @@
if (availableSlot.get() == 0) {
availableIpsToSlots.remove(currentCandidatePath);
}
- /**
+ /*
* Update the entry of the selected NC
*/
List<String> candidateNcs = pathToNCs.get(currentCandidatePath);
@@ -196,7 +180,7 @@
}
}
}
- /** not scheduled */
+ /* not scheduled */
return null;
} catch (Exception e) {
throw new IllegalStateException(e);
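Two idioms from this file generalize well. computeIfAbsent collapses the get/null-check/put sequence into one call, and Integer.compare is clearer than the nested ternary it replaces, while the lambda supersedes the anonymous Comparator class. A sketch of both, with invented sample data:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class RackIdiomsSketch {
        public static void main(String[] args) {
            // get/null-check/put becomes computeIfAbsent:
            Map<List<Integer>, List<String>> pathToNCs = new HashMap<>();
            pathToNCs.computeIfAbsent(Arrays.asList(1, 2), k -> new ArrayList<>()).add("nc1");
            System.out.println(pathToNCs);

            // Lexicographic path comparator as a lambda:
            Comparator<List<Integer>> byPath = (l1, l2) -> {
                int common = Math.min(l1.size(), l2.size());
                for (int i = 0; i < common; i++) {
                    int cmp = Integer.compare(l1.get(i), l2.get(i));
                    if (cmp != 0) {
                        return cmp;
                    }
                }
                return Integer.compare(l1.size(), l2.size());
            };
            System.out.println(byPath.compare(Arrays.asList(1, 2), Arrays.asList(1, 3))); // -1
        }
    }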
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
index 2d1c87a..f9b68bc 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
@@ -379,23 +379,19 @@
ncNameToIndex.clear();
int i = 0;
- /**
+ /*
* build the IP address to NC map
*/
for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos.entrySet()) {
String ipAddr = InetAddress.getByAddress(entry.getValue().getNetworkAddress().lookupIpAddress())
.getHostAddress();
- List<String> matchedNCs = ipToNcMapping.get(ipAddr);
- if (matchedNCs == null) {
- matchedNCs = new ArrayList<String>();
- ipToNcMapping.put(ipAddr, matchedNCs);
- }
+ List<String> matchedNCs = ipToNcMapping.computeIfAbsent(ipAddr, k -> new ArrayList<>());
matchedNCs.add(entry.getKey());
NCs[i] = entry.getKey();
i++;
}
- /**
+ /*
* set up the NC name to index mapping
*/
for (i = 0; i < NCs.length; i++) {
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
index 75c5bed..6d8f9cf 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
@@ -123,8 +123,7 @@
protected void addDependenciesToLicenseMap() throws ProjectBuildingException {
Map<MavenProject, List<Pair<String, String>>> dependencyLicenseMap = gatherDependencies();
- for (Map.Entry<MavenProject, List<Pair<String, String>>> dep : dependencyLicenseMap.entrySet()) {
- final MavenProject depProject = dep.getKey();
+ dependencyLicenseMap.forEach((depProject, value) -> {
Set<String> locations = dependencySets.isEmpty() ? Collections.singleton(location)
: getIncludedLocation(depProject.getArtifact());
if (isExcluded(depProject.getArtifact())) {
@@ -133,10 +132,10 @@
getLog().debug("skipping " + depProject + " [not included in dependency sets]");
} else {
for (String depLocation : locations) {
- addDependencyToLicenseMap(depProject, dep.getValue(), depLocation);
+ addDependencyToLicenseMap(depProject, value, depLocation);
}
}
- }
+ });
}
private int getLicenseMetric(String url) {
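One limit of these forEach conversions: BiConsumer.accept declares no checked exceptions, so nothing inside the lambda may throw one. addDependenciesToLicenseMap still declares throws ProjectBuildingException, presumably satisfied by the gatherDependencies() call that stays outside the lambda. The general workaround is to wrap, sketched here with IOException:

    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.HashMap;
    import java.util.Map;

    public class CheckedInLambdaSketch {
        public static void main(String[] args) {
            Map<String, String> licenseFiles = new HashMap<>();
            licenseFiles.put("dep-a", "/tmp/LICENSE-a.txt");

            licenseFiles.forEach((dep, path) -> {
                try {
                    // Files.size throws IOException; the lambda must handle it itself.
                    long size = Files.exists(Paths.get(path)) ? Files.size(Paths.get(path)) : 0L;
                    System.out.println(dep + ": " + size + " bytes");
                } catch (IOException e) {
                    throw new UncheckedIOException(e); // rethrow as unchecked
                }
            });
        }
    }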
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
index 6304a9a..fcea8e0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
@@ -801,19 +801,19 @@
}
synchronized (fileInfoMap) {
- for (Map.Entry<Integer, BufferedFileHandle> entry : fileInfoMap.entrySet()) {
+ fileInfoMap.forEach((key, value) -> {
try {
- boolean fileHasBeenDeleted = entry.getValue().fileHasBeenDeleted();
- sweepAndFlush(entry.getKey(), !fileHasBeenDeleted);
+ boolean fileHasBeenDeleted = value.fileHasBeenDeleted();
+ sweepAndFlush(key, !fileHasBeenDeleted);
if (!fileHasBeenDeleted) {
- ioManager.close(entry.getValue().getFileHandle());
+ ioManager.close(value.getFileHandle());
}
} catch (HyracksDataException e) {
if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Error flushing file id: " + entry.getKey(), e);
+ LOGGER.log(Level.WARNING, "Error flushing file id: " + key, e);
}
}
- }
+ });
fileInfoMap.clear();
}
}
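The forEach conversion does not change the locking story here: iteration stays inside synchronized (fileInfoMap), which is safe only as long as every writer synchronizes on the same monitor, and per-entry failures are caught inside the lambda so one bad file cannot abort the sweep. A reduced sketch of that shape, with the real I/O replaced by prints:

    import java.util.HashMap;
    import java.util.Map;

    public class GuardedSweepSketch {
        private final Map<Integer, String> fileInfoMap = new HashMap<>();

        void sweepAll() {
            synchronized (fileInfoMap) { // writers must hold the same monitor
                fileInfoMap.forEach((fileId, handle) -> {
                    try {
                        System.out.println("flushing file id " + fileId + " (" + handle + ")");
                    } catch (RuntimeException e) {
                        // Log and continue: one failure must not stop the sweep.
                        System.err.println("error flushing file id: " + fileId);
                    }
                });
                fileInfoMap.clear();
            }
        }

        public static void main(String[] args) {
            GuardedSweepSketch cache = new GuardedSweepSketch();
            cache.fileInfoMap.put(1, "handle-1");
            cache.sweepAll();
        }
    }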