[NO ISSUE][HYR][*DB] Minor refactoring / address SonarQube comments
Change-Id: Icf10b6df0fdc006675d8f0da6fd06d50200c6b6a
Reviewed-on: https://asterix-gerrit.ics.uci.edu/2098
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Contrib: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Murtadha Hubail <mhubail@apache.org>
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
index 1c2b9b5..084626e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
@@ -103,12 +103,8 @@
public void finish() {
Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> caMap = jag
.getConnectorActivityMap();
- for (Map.Entry<ConnectorDescriptorId, Pair<IActivity, Integer>> e : connectorProducerMap.entrySet()) {
- ConnectorDescriptorId cdId = e.getKey();
- Pair<IActivity, Integer> producer = e.getValue();
- Pair<IActivity, Integer> consumer = connectorConsumerMap.get(cdId);
- caMap.put(cdId, Pair.of(producer, consumer));
- }
+ connectorProducerMap
+ .forEach((cdId, producer) -> caMap.put(cdId, Pair.of(producer, connectorConsumerMap.get(cdId))));
}
private <K, V> void addToValueSet(Map<K, Set<V>> map, K n1, V n2) {
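Note: this hunk collapses an explicit Map.Entry loop into Map.forEach, the pattern most of this change applies. A minimal standalone sketch of the before/after shapes (class and variable names are illustrative, not from the Hyracks sources):

import java.util.HashMap;
import java.util.Map;

public class ForEachSketch {
    public static void main(String[] args) {
        Map<String, Integer> ages = new HashMap<>();
        ages.put("ann", 34);
        ages.put("bob", 27);

        // Before: iterate entrySet() and unpack each Map.Entry by hand.
        for (Map.Entry<String, Integer> e : ages.entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue());
        }

        // After: forEach hands the key and value to the BiConsumer directly.
        ages.forEach((name, age) -> System.out.println(name + " -> " + age));
    }
}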
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
index 2cf96c2..9f66080 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
@@ -309,11 +309,7 @@
}
private <K, V> void insertIntoIndexedMap(Map<K, List<V>> map, K key, int index, V value) {
- List<V> vList = map.get(key);
- if (vList == null) {
- vList = new ArrayList<>();
- map.put(key, vList);
- }
+ List<V> vList = map.computeIfAbsent(key, k -> new ArrayList<>());
extend(vList, index);
vList.set(index, value);
}
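Note: insertIntoIndexedMap now uses Map.computeIfAbsent in place of the get/null-check/put idiom. A hedged sketch of the same collapse in isolation (names are illustrative):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ComputeIfAbsentSketch {
    public static void main(String[] args) {
        Map<String, List<Integer>> index = new HashMap<>();

        // Before: look up, test for null, insert a fresh list, then use it.
        List<Integer> v = index.get("a");
        if (v == null) {
            v = new ArrayList<>();
            index.put("a", v);
        }
        v.add(1);

        // After: the mapping function runs only on a miss, and the existing
        // or freshly created list is returned either way.
        index.computeIfAbsent("b", k -> new ArrayList<>()).add(2);

        System.out.println(index); // two singleton lists, keyed by "a" and "b"
    }
}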
@@ -322,9 +318,9 @@
public String toString() {
StringBuilder buffer = new StringBuilder();
- for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
- buffer.append(e.getKey().getId()).append(" : ").append(e.getValue().toString()).append("\n");
- List<IConnectorDescriptor> inputs = opInputMap.get(e.getKey());
+ opMap.forEach((key, value) -> {
+ buffer.append(key.getId()).append(" : ").append(value.toString()).append("\n");
+ List<IConnectorDescriptor> inputs = opInputMap.get(key);
if (inputs != null && !inputs.isEmpty()) {
buffer.append(" Inputs:\n");
for (IConnectorDescriptor c : inputs) {
@@ -332,7 +328,7 @@
.append("\n");
}
}
- List<IConnectorDescriptor> outputs = opOutputMap.get(e.getKey());
+ List<IConnectorDescriptor> outputs = opOutputMap.get(key);
if (outputs != null && !outputs.isEmpty()) {
buffer.append(" Outputs:\n");
for (IConnectorDescriptor c : outputs) {
@@ -340,7 +336,7 @@
.append("\n");
}
}
- }
+ });
buffer.append("\n").append("Constraints:\n").append(userConstraints);
@@ -352,8 +348,8 @@
ObjectNode jjob = om.createObjectNode();
ArrayNode jopArray = om.createArrayNode();
- for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
- ObjectNode op = e.getValue().toJSON();
+ opMap.forEach((key, value) -> {
+ ObjectNode op = value.toJSON();
if (!userConstraints.isEmpty()) {
// Add operator partition constraints to each JSON operator.
ObjectNode pcObject = om.createObjectNode();
@@ -364,12 +360,12 @@
ExpressionTag tag = constraint.getLValue().getTag();
if (tag == ExpressionTag.PARTITION_COUNT) {
PartitionCountExpression pce = (PartitionCountExpression) constraint.getLValue();
- if (e.getKey() == pce.getOperatorDescriptorId()) {
+ if (key == pce.getOperatorDescriptorId()) {
pcObject.put("count", getConstraintExpressionRValue(constraint));
}
} else if (tag == ExpressionTag.PARTITION_LOCATION) {
PartitionLocationExpression ple = (PartitionLocationExpression) constraint.getLValue();
- if (e.getKey() == ple.getOperatorDescriptorId()) {
+ if (key == ple.getOperatorDescriptorId()) {
pleObject.put(Integer.toString(ple.getPartition()),
getConstraintExpressionRValue(constraint));
}
@@ -383,23 +379,23 @@
}
}
jopArray.add(op);
- }
+ });
jjob.set("operators", jopArray);
ArrayNode jcArray = om.createArrayNode();
- for (Map.Entry<ConnectorDescriptorId, IConnectorDescriptor> e : connMap.entrySet()) {
+ connMap.forEach((key, value) -> {
ObjectNode conn = om.createObjectNode();
Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connection =
- connectorOpMap.get(e.getKey());
+ connectorOpMap.get(key);
if (connection != null) {
conn.put("in-operator-id", connection.getLeft().getLeft().getOperatorId().toString());
conn.put("in-operator-port", connection.getLeft().getRight().intValue());
conn.put("out-operator-id", connection.getRight().getLeft().getOperatorId().toString());
conn.put("out-operator-port", connection.getRight().getRight().intValue());
}
- conn.set("connector", e.getValue().toJSON());
+ conn.set("connector", value.toJSON());
jcArray.add(conn);
- }
+ });
jjob.set("connectors", jcArray);
return jjob;
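Note: the toString() and toJSON() conversions above move loop bodies into lambdas, which is safe only because every captured local (buffer, om, jopArray, jcArray, userConstraints) is effectively final. A small sketch of that constraint (illustrative names):

import java.util.Map;

public class CaptureSketch {
    static String render(Map<String, String> m) {
        StringBuilder buffer = new StringBuilder(); // assigned once: capturable
        m.forEach((k, v) -> buffer.append(k).append(" : ").append(v).append('\n'));
        // int count = 0;
        // m.forEach((k, v) -> count++); // would not compile: count is reassigned
        return buffer.toString();
    }
}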
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
index 20f128d..7cdb300 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
@@ -29,7 +29,6 @@
import java.util.Set;
import org.apache.commons.lang3.tuple.Pair;
-
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
import org.apache.hyracks.api.dataflow.IActivity;
@@ -37,7 +36,6 @@
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.job.ActivityCluster;
import org.apache.hyracks.api.job.ActivityClusterGraph;
-import org.apache.hyracks.api.job.ActivityClusterId;
import org.apache.hyracks.api.rewriter.runtime.SuperActivity;
/**
@@ -65,12 +63,10 @@
acg.getActivityMap().clear();
acg.getConnectorMap().clear();
Map<IActivity, SuperActivity> invertedActivitySuperActivityMap = new HashMap<>();
- for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
- rewriteIntraActivityCluster(entry.getValue(), invertedActivitySuperActivityMap);
- }
- for (Entry<ActivityClusterId, ActivityCluster> entry : acg.getActivityClusterMap().entrySet()) {
- rewriteInterActivityCluster(entry.getValue(), invertedActivitySuperActivityMap);
- }
+ acg.getActivityClusterMap()
+ .forEach((key, value) -> rewriteIntraActivityCluster(value, invertedActivitySuperActivityMap));
+ acg.getActivityClusterMap()
+ .forEach((key, value) -> rewriteInterActivityCluster(value, invertedActivitySuperActivityMap));
invertedActivitySuperActivityMap.clear();
}
@@ -84,14 +80,11 @@
Map<IActivity, SuperActivity> invertedActivitySuperActivityMap) {
Map<ActivityId, Set<ActivityId>> blocked2BlockerMap = ac.getBlocked2BlockerMap();
Map<ActivityId, ActivityId> invertedAid2SuperAidMap = new HashMap<>();
- for (Entry<IActivity, SuperActivity> entry : invertedActivitySuperActivityMap.entrySet()) {
- invertedAid2SuperAidMap.put(entry.getKey().getActivityId(), entry.getValue().getActivityId());
- }
+ invertedActivitySuperActivityMap
+ .forEach((key, value) -> invertedAid2SuperAidMap.put(key.getActivityId(), value.getActivityId()));
Map<ActivityId, Set<ActivityId>> replacedBlocked2BlockerMap = new HashMap<>();
- for (Entry<ActivityId, Set<ActivityId>> entry : blocked2BlockerMap.entrySet()) {
- ActivityId blocked = entry.getKey();
+ blocked2BlockerMap.forEach((blocked, blockers) -> {
ActivityId replacedBlocked = invertedAid2SuperAidMap.get(blocked);
- Set<ActivityId> blockers = entry.getValue();
Set<ActivityId> replacedBlockers = null;
if (blockers != null) {
replacedBlockers = new HashSet<>();
@@ -113,7 +106,7 @@
replacedBlocked2BlockerMap.put(replacedBlocked, existingBlockers);
}
}
- }
+ });
blocked2BlockerMap.clear();
blocked2BlockerMap.putAll(replacedBlocked2BlockerMap);
}
@@ -136,23 +129,21 @@
Map<ActivityId, SuperActivity> superActivities = new HashMap<>();
Map<ActivityId, Queue<IActivity>> toBeExpendedMap = new HashMap<>();
- /**
+ /*
* Build the initial super activities
*/
- for (Entry<ActivityId, IActivity> entry : activities.entrySet()) {
- ActivityId activityId = entry.getKey();
- IActivity activity = entry.getValue();
+ activities.forEach((activityId, activity) -> {
if (activityInputMap.get(activityId) == null) {
startActivities.put(activityId, activity);
- /**
+ /*
* use the start activity's id as the id of the super activity
*/
createNewSuperActivity(ac, superActivities, toBeExpendedMap, invertedActivitySuperActivityMap,
activityId, activity);
}
- }
+ });
- /**
+ /*
* expand one-to-one connected activity cluster by the BFS order.
* after the while-loop, the original activities are partitioned
* into equivalent classes, one-per-super-activity.
@@ -165,19 +156,19 @@
ActivityId superActivityId = entry.getKey();
SuperActivity superActivity = entry.getValue();
- /**
+ /*
* for the case where the super activity has already been swallowed
*/
if (superActivities.get(superActivityId) == null) {
continue;
}
- /**
+ /*
* expand the super activity
*/
Queue<IActivity> toBeExpended = toBeExpendedMap.get(superActivityId);
if (toBeExpended == null) {
- /**
+ /*
* Nothing to expand
*/
continue;
@@ -191,7 +182,7 @@
IActivity newActivity = endPoints.getRight().getLeft();
SuperActivity existingSuperActivity = invertedActivitySuperActivityMap.get(newActivity);
if (outputConn.getClass().getName().contains(ONE_TO_ONE_CONNECTOR)) {
- /**
+ /*
* expand the super activity cluster on a one-to-one out-bound connection
*/
if (existingSuperActivity == null) {
@@ -199,13 +190,13 @@
toBeExpended.add(newActivity);
invertedActivitySuperActivityMap.put(newActivity, superActivity);
} else {
- /**
+ /*
* the two activities are already in the same super activity
*/
if (existingSuperActivity == superActivity) {
continue;
}
- /**
+ /*
* swallow an existing super activity
*/
swallowExistingSuperActivity(superActivities, toBeExpendedMap,
@@ -214,7 +205,7 @@
}
} else {
if (existingSuperActivity == null) {
- /**
+ /*
* create new activity
*/
createNewSuperActivity(ac, superActivities, toBeExpendedMap,
@@ -224,10 +215,10 @@
}
}
- /**
+ /*
* remove the to-be-expended queue if it is empty
*/
- if (toBeExpended.size() == 0) {
+ if (toBeExpended.isEmpty()) {
toBeExpendedMap.remove(superActivityId);
}
}
@@ -237,28 +228,25 @@
Map<ConnectorDescriptorId, RecordDescriptor> connRecordDesc = ac.getConnectorRecordDescriptorMap();
Map<SuperActivity, Integer> superActivityProducerPort = new HashMap<>();
Map<SuperActivity, Integer> superActivityConsumerPort = new HashMap<>();
- for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
- superActivityProducerPort.put(entry.getValue(), 0);
- superActivityConsumerPort.put(entry.getValue(), 0);
- }
+ superActivities.forEach((key, value) -> {
+ superActivityProducerPort.put(value, 0);
+ superActivityConsumerPort.put(value, 0);
+ });
- /**
+ /*
* create a new activity cluster to replace the old activity cluster
*/
ActivityCluster newActivityCluster = new ActivityCluster(acg, ac.getId());
newActivityCluster.setConnectorPolicyAssignmentPolicy(ac.getConnectorPolicyAssignmentPolicy());
- for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
- newActivityCluster.addActivity(entry.getValue());
- acg.getActivityMap().put(entry.getKey(), newActivityCluster);
- }
+ superActivities.forEach((key, value) -> {
+ newActivityCluster.addActivity(value);
+ acg.getActivityMap().put(key, newActivityCluster);
+ });
- /**
+ /*
* Setup connectors: either inside a super activity or among super activities
*/
- for (Entry<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> entry : connectorActivityMap
- .entrySet()) {
- ConnectorDescriptorId connectorId = entry.getKey();
- Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> endPoints = entry.getValue();
+ connectorActivityMap.forEach((connectorId, endPoints) -> {
IActivity producerActivity = endPoints.getLeft().getLeft();
IActivity consumerActivity = endPoints.getRight().getLeft();
int producerPort = endPoints.getLeft().getRight();
@@ -266,14 +254,14 @@
RecordDescriptor recordDescriptor = connRecordDesc.get(connectorId);
IConnectorDescriptor conn = connMap.get(connectorId);
if (conn.getClass().getName().contains(ONE_TO_ONE_CONNECTOR)) {
- /**
+ /*
* connection edge between inner activities
*/
SuperActivity residingSuperActivity = invertedActivitySuperActivityMap.get(producerActivity);
residingSuperActivity.connect(conn, producerActivity, producerPort, consumerActivity, consumerPort,
recordDescriptor);
} else {
- /**
+ /*
* connection edge between super activities
*/
SuperActivity producerSuperActivity = invertedActivitySuperActivityMap.get(producerActivity);
@@ -284,7 +272,7 @@
newActivityCluster.connect(conn, producerSuperActivity, producerSAPort, consumerSuperActivity,
consumerSAPort, recordDescriptor);
- /**
+ /*
* bridge the port
*/
producerSuperActivity.setClusterOutputIndex(producerSAPort, producerActivity.getActivityId(),
@@ -293,30 +281,30 @@
consumerPort);
acg.getConnectorMap().put(connectorId, newActivityCluster);
- /**
+ /*
* increasing the port number for the producer and consumer
*/
superActivityProducerPort.put(producerSuperActivity, ++producerSAPort);
superActivityConsumerPort.put(consumerSuperActivity, ++consumerSAPort);
}
- }
+ });
- /**
+ /*
* Set up the roots of the new activity cluster
*/
- for (Entry<ActivityId, SuperActivity> entry : superActivities.entrySet()) {
- List<IConnectorDescriptor> connIds = newActivityCluster.getActivityOutputMap().get(entry.getKey());
+ superActivities.forEach((key, value) -> {
+ List<IConnectorDescriptor> connIds = newActivityCluster.getActivityOutputMap().get(key);
if (connIds == null || connIds.isEmpty()) {
- newActivityCluster.addRoot(entry.getValue());
+ newActivityCluster.addRoot(value);
}
- }
+ });
- /**
+ /*
* set up the blocked2Blocker mapping, which will be updated in the rewriteInterActivityCluster call
*/
newActivityCluster.getBlocked2BlockerMap().putAll(ac.getBlocked2BlockerMap());
- /**
+ /*
* replace the old activity cluster with the new activity cluster
*/
acg.getActivityClusterMap().put(ac.getId(), newActivityCluster);
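Note: the comment churn in this file turns /** ... */ into /* ... */ inside method bodies. Javadoc is only meaningful immediately before a declaration; elsewhere it is dead markup that SonarQube flags. Illustrative placement:

public class CommentSketch {
    /** Javadoc belongs here, attached to a declaration. */
    public void run() {
        /* inside a body, an ordinary block comment is the right form */
    }
}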
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java
index 476a744..ddcfd78 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/runtime/SuperActivity.java
@@ -59,17 +59,17 @@
throws HyracksDataException {
final Map<ActivityId, IActivity> startActivities = new HashMap<>();
Map<ActivityId, IActivity> activities = getActivityMap();
- for (Entry<ActivityId, IActivity> entry : activities.entrySet()) {
- /**
+ activities.forEach((key, value) -> {
+ /*
* extract start activities
*/
- List<IConnectorDescriptor> conns = getActivityInputMap().get(entry.getKey());
+ List<IConnectorDescriptor> conns = getActivityInputMap().get(key);
if (conns == null || conns.isEmpty()) {
- startActivities.put(entry.getKey(), entry.getValue());
+ startActivities.put(key, value);
}
- }
+ });
- /**
+ /*
* wrap a RecordDescriptorProvider for the super activity
*/
IRecordDescriptorProvider wrappedRecDescProvider = new IRecordDescriptorProvider() {
@@ -77,7 +77,7 @@
@Override
public RecordDescriptor getInputRecordDescriptor(ActivityId aid, int inputIndex) {
if (startActivities.get(aid) != null) {
- /**
+ /*
* if the activity is a start (input boundary) activity
*/
int superActivityInputChannel = SuperActivity.this.getClusterInputIndex(Pair.of(aid, inputIndex));
@@ -86,14 +86,14 @@
}
}
if (SuperActivity.this.getActivityMap().get(aid) != null) {
- /**
+ /*
* if the activity is an internal activity of the super activity
*/
IConnectorDescriptor conn = getActivityInputMap().get(aid).get(inputIndex);
return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
}
- /**
+ /*
* the following is for the case where the activity is in other SuperActivities
*/
ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();
@@ -121,7 +121,7 @@
@Override
public RecordDescriptor getOutputRecordDescriptor(ActivityId aid, int outputIndex) {
- /**
+ /*
* if the activity is an output-boundary activity
*/
int superActivityOutputChannel = SuperActivity.this.getClusterOutputIndex(Pair.of(aid, outputIndex));
@@ -130,14 +130,14 @@
}
if (SuperActivity.this.getActivityMap().get(aid) != null) {
- /**
+ /*
* if the activity is an internal activity of the super activity
*/
IConnectorDescriptor conn = getActivityOutputMap().get(aid).get(outputIndex);
return getConnectorRecordDescriptorMap().get(conn.getConnectorId());
}
- /**
+ /*
* the following is for the case where the activity is in other SuperActivities
*/
ActivityClusterGraph acg = SuperActivity.this.getActivityClusterGraph();
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
index 285e932..e9491f3 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ErrorMessageUtil.java
@@ -56,9 +56,9 @@
Properties prop = new Properties();
Map<Integer, String> errorMessageMap = new HashMap<>();
prop.load(resourceStream);
- for (Map.Entry<Object, Object> entry : prop.entrySet()) {
- String key = (String) entry.getKey();
- String msg = (String) entry.getValue();
+ prop.forEach((key1, value) -> {
+ String key = (String) key1;
+ String msg = (String) value;
if (key.contains(COMMA)) {
String[] codes = key.split(COMMA);
for (String code : codes) {
@@ -67,7 +67,7 @@
} else {
errorMessageMap.put(Integer.parseInt(key), msg);
}
- }
+ });
return errorMessageMap;
}
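Note: java.util.Properties extends Hashtable<Object, Object>, so its forEach passes raw Objects to the lambda and the String casts survive the refactoring. A hedged sketch with an in-memory table (the key format mirrors the comma-separated codes handled above):

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PropertiesForEachSketch {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.setProperty("1,2", "two codes, one message");
        prop.setProperty("3", "a single code");

        Map<Integer, String> byCode = new HashMap<>();
        prop.forEach((k, v) -> {
            String key = (String) k; // keys and values are typed as Object
            String msg = (String) v;
            for (String code : key.split(",")) { // "3".split(",") is ["3"]
                byCode.put(Integer.parseInt(code), msg);
            }
        });
        System.out.println(byCode.size()); // 3
    }
}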
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
index 66f7f10..e5eec11 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
@@ -22,7 +22,6 @@
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedHashMap;
-import java.util.Map;
public class ExecutionTimeProfiler {
@@ -67,9 +66,7 @@
try {
synchronized (lock1) {
sb.append("\n\n");
- for (Map.Entry<String, String> entry : spentTimePerJobMap.get(jobSignature).entrySet()) {
- sb.append(entry.getValue());
- }
+ spentTimePerJobMap.get(jobSignature).forEach((key, value) -> sb.append(value));
fos.write(sb.toString().getBytes());
fos.flush();
spentTimePerJobMap.get(jobSignature).clear();
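Note: the lambda above ignores its key parameter; when only values matter, iterating the value view is an equivalent alternative. Hedged sketch:

import java.util.LinkedHashMap;
import java.util.Map;

public class ValuesSketch {
    public static void main(String[] args) {
        Map<String, String> timings = new LinkedHashMap<>();
        timings.put("op1", "12ms\n");
        timings.put("op2", "7ms\n");

        StringBuilder a = new StringBuilder();
        timings.forEach((key, value) -> a.append(value)); // key unused

        StringBuilder b = new StringBuilder();
        timings.values().forEach(b::append); // same output, no unused parameter

        System.out.println(a.toString().equals(b.toString())); // true
    }
}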
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
index a3fbb70..7b99df2 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
@@ -260,9 +260,9 @@
}
private void connectNCs() {
- getNCServices().entrySet().forEach(ncService -> {
+ getNCServices().forEach((key, value) -> {
final TriggerNCWork triggerWork = new TriggerNCWork(ClusterControllerService.this,
- ncService.getValue().getLeft(), ncService.getValue().getRight(), ncService.getKey());
+ value.getLeft(), value.getRight(), key);
executor.submit(triggerWork);
});
serviceCtx.addClusterLifecycleListener(new IClusterLifecycleListener() {
@@ -289,10 +289,9 @@
private void terminateNCServices() throws Exception {
List<ShutdownNCServiceWork> shutdownNCServiceWorks = new ArrayList<>();
- getNCServices().entrySet().forEach(ncService -> {
- if (ncService.getValue().getRight() != NCConfig.NCSERVICE_PORT_DISABLED) {
- ShutdownNCServiceWork shutdownWork = new ShutdownNCServiceWork(ncService.getValue().getLeft(),
- ncService.getValue().getRight(), ncService.getKey());
+ getNCServices().forEach((key, value) -> {
+ if (value.getRight() != NCConfig.NCSERVICE_PORT_DISABLED) {
+ ShutdownNCServiceWork shutdownWork = new ShutdownNCServiceWork(value.getLeft(), value.getRight(), key);
workQueue.schedule(shutdownWork);
shutdownNCServiceWorks.add(shutdownWork);
}
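Note: both hunks above unpack commons-lang3 Pair values inside the BiConsumer via getLeft()/getRight(). A minimal sketch of that shape, assuming org.apache.commons.lang3 is on the classpath (node names and addresses are invented):

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang3.tuple.Pair;

public class PairForEachSketch {
    public static void main(String[] args) {
        Map<String, Pair<String, Integer>> services = new HashMap<>();
        services.put("nc1", Pair.of("10.0.0.1", 9090));
        services.forEach((node, addr) ->
                System.out.println(node + " @ " + addr.getLeft() + ":" + addr.getRight()));
    }
}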
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
index a380967..3cd6235 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
@@ -135,11 +135,9 @@
@Override
public Map<String, NodeControllerInfo> getNodeControllerInfoMap() {
Map<String, NodeControllerInfo> result = new LinkedHashMap<>();
- for (Map.Entry<String, NodeControllerState> e : nodeRegistry.entrySet()) {
- NodeControllerState ncState = e.getValue();
- result.put(e.getKey(), new NodeControllerInfo(e.getKey(), NodeStatus.ALIVE, ncState.getDataPort(),
- ncState.getDatasetPort(), ncState.getMessagingPort(), ncState.getCapacity().getCores()));
- }
+ nodeRegistry.forEach(
+ (key, ncState) -> result.put(key, new NodeControllerInfo(key, NodeStatus.ALIVE, ncState.getDataPort(),
+ ncState.getDatasetPort(), ncState.getMessagingPort(), ncState.getCapacity().getCores())));
return result;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
index 57b8c50..04166a4 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
@@ -242,15 +242,15 @@
JobRun jobRun = executor.getJobRun();
Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies = jobRun.getConnectorPolicyMap();
- for (Map.Entry<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> e : taskConnectivity.entrySet()) {
- Set<TaskId> cluster = taskClusterMap.get(e.getKey());
- for (Pair<TaskId, ConnectorDescriptorId> p : e.getValue()) {
+ taskConnectivity.forEach((key, value) -> {
+ Set<TaskId> cluster = taskClusterMap.get(key);
+ for (Pair<TaskId, ConnectorDescriptorId> p : value) {
IConnectorPolicy cPolicy = connectorPolicies.get(p.getRight());
if (cPolicy.requiresProducerConsumerCoscheduling()) {
cluster.add(p.getLeft());
}
}
- }
+ });
/*
* We compute the transitive closure of this (producer-consumer) relation to find the largest set of
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
index a3078b6..8a69a6f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/JobExecutor.java
@@ -65,8 +65,8 @@
import org.apache.hyracks.control.cc.work.JobCleanupWork;
import org.apache.hyracks.control.common.job.PartitionState;
import org.apache.hyracks.control.common.job.TaskAttemptDescriptor;
-import org.apache.hyracks.control.common.work.NoOpCallback;
import org.apache.hyracks.control.common.work.IResultCallback;
+import org.apache.hyracks.control.common.work.NoOpCallback;
public class JobExecutor {
private static final Logger LOGGER = Logger.getLogger(JobExecutor.class.getName());
@@ -379,14 +379,13 @@
tcAttempt.initializePendingTaskCounter();
tcAttempts.add(tcAttempt);
- /**
+ /*
* Improvement for reducing master/slave message communications, for each TaskAttemptDescriptor,
* we set the NetworkAddress[][] partitionLocations, in which each row is for an incoming connector descriptor
* and each column is for an input channel of the connector.
*/
INodeManager nodeManager = ccs.getNodeManager();
- for (Map.Entry<String, List<TaskAttemptDescriptor>> e : taskAttemptMap.entrySet()) {
- List<TaskAttemptDescriptor> tads = e.getValue();
+ taskAttemptMap.forEach((key, tads) -> {
for (TaskAttemptDescriptor tad : tads) {
TaskAttemptId taid = tad.getTaskAttemptId();
int attempt = taid.getAttempt();
@@ -401,7 +400,7 @@
for (int i = 0; i < inPartitionCounts.length; ++i) {
ConnectorDescriptorId cdId = inConnectors.get(i).getConnectorId();
IConnectorPolicy policy = jobRun.getConnectorPolicyMap().get(cdId);
- /**
+ /*
* carry sender location information into a task
* when it is not the case that it is a re-attempt and the send-side
* is materialized blocking.
@@ -419,7 +418,7 @@
}
tad.setInputPartitionLocations(partitionLocations);
}
- }
+ });
tcAttempt.setStatus(TaskClusterAttempt.TaskClusterStatus.RUNNING);
tcAttempt.setStartTime(System.currentTimeMillis());
@@ -560,12 +559,11 @@
final JobId jobId = jobRun.getJobId();
LOGGER.info("Abort map for job: " + jobId + ": " + abortTaskAttemptMap);
INodeManager nodeManager = ccs.getNodeManager();
- for (Map.Entry<String, List<TaskAttemptId>> entry : abortTaskAttemptMap.entrySet()) {
- final NodeControllerState node = nodeManager.getNodeControllerState(entry.getKey());
- final List<TaskAttemptId> abortTaskAttempts = entry.getValue();
+ abortTaskAttemptMap.forEach((key, abortTaskAttempts) -> {
+ final NodeControllerState node = nodeManager.getNodeControllerState(key);
if (node != null) {
if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Aborting: " + abortTaskAttempts + " at " + entry.getKey());
+ LOGGER.info("Aborting: " + abortTaskAttempts + " at " + key);
}
try {
node.getNodeController().abortTasks(jobId, abortTaskAttempts);
@@ -573,7 +571,7 @@
LOGGER.log(Level.SEVERE, e.getMessage(), e);
}
}
- }
+ });
inProgressTaskClusters.remove(tcAttempt.getTaskCluster());
TaskCluster tc = tcAttempt.getTaskCluster();
PartitionMatchMaker pmm = jobRun.getPartitionMatchMaker();
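Note: a BiConsumer cannot throw checked exceptions, so an entrySet loop converts to forEach unchanged only if its body already contains the try/catch, as the abort loop above does. Illustrative sketch (process stands in for the remote abortTasks call):

import java.util.Map;

public class CheckedExceptionSketch {
    static void process(String nodeId) throws Exception {
        // stand-in for node.getNodeController().abortTasks(jobId, attempts)
    }

    static void abortAll(Map<String, String> nodes) {
        nodes.forEach((id, state) -> {
            try {
                process(id); // must be caught here; the lambda cannot rethrow
            } catch (Exception e) {
                System.err.println("abort failed on " + id + ": " + e);
            }
        });
    }
}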
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
index 95a6d9b..ef0bca2 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
@@ -284,10 +284,9 @@
ObjectNode planJSON = om.createObjectNode();
ArrayNode acTasks = om.createArrayNode();
- for (Map.Entry<ActivityId, ActivityPlan> e : acp.getActivityPlanMap().entrySet()) {
- ActivityPlan acPlan = e.getValue();
+ acp.getActivityPlanMap().forEach((key, acPlan) -> {
ObjectNode entry = om.createObjectNode();
- entry.put("activity-id", e.getKey().toString());
+ entry.put("activity-id", key.toString());
ActivityPartitionDetails apd = acPlan.getActivityPartitionDetails();
entry.put("partition-count", apd.getPartitionCount());
@@ -319,21 +318,21 @@
ArrayNode dependentTasksJSON = om.createArrayNode();
for (TaskId dependent : t.getDependents()) {
dependentTasksJSON.add(dependent.toString());
}
task.set("dependents", dependentTasksJSON);
ArrayNode dependencyTasksJSON = om.createArrayNode();
for (TaskId dependency : t.getDependencies()) {
dependencyTasksJSON.add(dependency.toString());
}
task.set("dependencies", dependencyTasksJSON);
tasks.add(task);
}
entry.set("tasks", tasks);
}
acTasks.add(entry);
- }
+ });
planJSON.set("activities", acTasks);
ArrayNode tClusters = om.createArrayNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
index ee22768..3c26f84 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
@@ -18,16 +18,14 @@
*/
package org.apache.hyracks.control.cc.web;
-import java.util.Map;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.web.util.IJSONOutputFunction;
import org.apache.hyracks.control.cc.work.GatherStateDumpsWork;
import org.apache.hyracks.control.cc.work.GatherStateDumpsWork.StateDumpRun;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
public class StateDumpRESTAPIFunction implements IJSONOutputFunction {
private final ClusterControllerService ccs;
@@ -44,9 +42,7 @@
ObjectMapper om = new ObjectMapper();
ObjectNode result = om.createObjectNode();
- for (Map.Entry<String, String> e : sdr.getStateDump().entrySet()) {
- result.put(e.getKey(), e.getValue());
- }
+ sdr.getStateDump().forEach(result::put);
return result;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
index d13b5e6..67738ba 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
@@ -344,20 +344,20 @@
private void applyDefaults() {
LOGGER.fine("applying defaults");
- for (Map.Entry<Section, Map<String, IOption>> entry : sectionMap.entrySet()) {
- if (entry.getKey() == Section.NC) {
- entry.getValue().values().forEach(option -> getNodeNames()
+ sectionMap.forEach((key, value) -> {
+ if (key == Section.NC) {
+ value.values().forEach(option -> getNodeNames()
.forEach(node -> getOrDefault(getNodeEffectiveMap(node), option, node)));
for (Map.Entry<String, Map<IOption, Object>> nodeMap : nodeSpecificMap.entrySet()) {
- entry.getValue().values()
+ value.values()
.forEach(option -> getOrDefault(
new CompositeMap<>(nodeMap.getValue(), definedMap, new NoOpMapMutator()), option,
nodeMap.getKey()));
}
} else {
- entry.getValue().values().forEach(option -> getOrDefault(configurationMap, option, null));
+ value.values().forEach(option -> getOrDefault(configurationMap, option, null));
}
- }
+ });
}
private Object getOrDefault(Map<IOption, Object> map, IOption option, String nodeId) {
@@ -450,15 +450,13 @@
public Ini toIni(boolean includeDefaults) {
Ini ini = new Ini();
- for (Map.Entry<IOption, Object> entry : (includeDefaults ? configurationMap : definedMap).entrySet()) {
- if (entry.getValue() != null) {
- final IOption option = entry.getKey();
- ini.add(option.section().sectionName(), option.ini(), option.type().serializeToIni(entry.getValue()));
+ (includeDefaults ? configurationMap : definedMap).forEach((option, value) -> {
+ if (value != null) {
+ ini.add(option.section().sectionName(), option.ini(), option.type().serializeToIni(value));
}
- }
- for (Map.Entry<String, Map<IOption, Object>> nodeMapEntry : nodeSpecificMap.entrySet()) {
- String section = Section.NC.sectionName() + "/" + nodeMapEntry.getKey();
- final Map<IOption, Object> nodeValueMap = nodeMapEntry.getValue();
+ });
+ nodeSpecificMap.forEach((key, nodeValueMap) -> {
+ String section = Section.NC.sectionName() + "/" + key;
synchronized (nodeValueMap) {
for (Map.Entry<IOption, Object> entry : nodeValueMap.entrySet()) {
if (entry.getValue() != null) {
@@ -467,7 +465,7 @@
}
}
}
- }
+ });
return ini;
}
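Note: toIni() now iterates a ternary-selected map directly: (includeDefaults ? configurationMap : definedMap).forEach(...). The parenthesized conditional is an ordinary expression, so a method call can chain off it. Sketch with invented option names:

import java.util.HashMap;
import java.util.Map;

public class TernaryMapSketch {
    static void dump(boolean includeDefaults, Map<String, String> defaults,
            Map<String, String> defined) {
        (includeDefaults ? defaults : defined).forEach((option, value) -> {
            if (value != null) { // skip unset options, as toIni() does
                System.out.println(option + " = " + value);
            }
        });
    }

    public static void main(String[] args) {
        Map<String, String> defined = new HashMap<>();
        defined.put("nc.port", "9090");
        dump(false, new HashMap<>(), defined); // nc.port = 9090
    }
}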
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
index d1d33a5..90dfc8c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
@@ -26,19 +26,19 @@
import java.util.Map;
import java.util.Map.Entry;
+import org.apache.hyracks.api.io.IWritable;
+
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.hyracks.api.io.IWritable;
-
public abstract class AbstractProfile implements IWritable, Serializable {
private static final long serialVersionUID = 1L;
protected Map<String, Long> counters;
public AbstractProfile() {
- counters = new HashMap<String, Long>();
+ counters = new HashMap<>();
}
public Map<String, Long> getCounters() {
@@ -50,12 +50,12 @@
protected void populateCounters(ObjectNode jo) {
ObjectMapper om = new ObjectMapper();
ArrayNode countersObj = om.createArrayNode();
- for (Map.Entry<String, Long> e : counters.entrySet()) {
+ counters.forEach((key, value) -> {
ObjectNode jpe = om.createObjectNode();
- jpe.put("name", e.getKey());
- jpe.put("value", e.getValue());
+ jpe.put("name", key);
+ jpe.put("value", value);
countersObj.add(jpe);
- }
+ });
jo.set("counters", countersObj);
}
@@ -75,7 +75,7 @@
@Override
public void readFields(DataInput input) throws IOException {
int size = input.readInt();
- counters = new HashMap<String, Long>();
+ counters = new HashMap<>();
for (int i = 0; i < size; i++) {
String key = input.readUTF();
long value = input.readLong();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
index 79a5538..64d074b 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
@@ -25,10 +25,11 @@
import java.util.Map;
import java.util.Map.Entry;
+import org.apache.hyracks.api.job.JobId;
+
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.hyracks.api.job.JobId;
public class JobProfile extends AbstractProfile {
private static final long serialVersionUID = 1L;
@@ -49,7 +50,7 @@
public JobProfile(JobId jobId) {
this.jobId = jobId;
- jobletProfiles = new HashMap<String, JobletProfile>();
+ jobletProfiles = new HashMap<>();
}
public JobId getJobId() {
@@ -91,7 +92,7 @@
public void readFields(DataInput input) throws IOException {
jobId = JobId.create(input);
int size = input.readInt();
- jobletProfiles = new HashMap<String, JobletProfile>();
+ jobletProfiles = new HashMap<>();
for (int i = 0; i < size; i++) {
String key = input.readUTF();
JobletProfile value = JobletProfile.create(input);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
index c3792df..5bdb1b5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
@@ -25,11 +25,11 @@
import java.util.Map;
import java.util.Map.Entry;
+import org.apache.hyracks.api.dataflow.TaskAttemptId;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.hyracks.api.dataflow.TaskAttemptId;
public class JobletProfile extends AbstractProfile {
private static final long serialVersionUID = 1L;
@@ -50,7 +50,7 @@
public JobletProfile(String nodeId) {
this.nodeId = nodeId;
- taskProfiles = new HashMap<TaskAttemptId, TaskProfile>();
+ taskProfiles = new HashMap<>();
}
public String getNodeId() {
@@ -67,7 +67,7 @@
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
- json.put("node-id", nodeId.toString());
+ json.put("node-id", nodeId);
populateCounters(json);
ArrayNode tasks = om.createArrayNode();
for (TaskProfile p : taskProfiles.values()) {
@@ -94,7 +94,7 @@
super.readFields(input);
nodeId = input.readUTF();
int size = input.readInt();
- taskProfiles = new HashMap<TaskAttemptId, TaskProfile>();
+ taskProfiles = new HashMap<>();
for (int i = 0; i < size; i++) {
TaskAttemptId key = TaskAttemptId.create(input);
TaskProfile value = TaskProfile.create(input);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
index 6dc9619..66a5e0f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Joblet.java
@@ -186,9 +186,7 @@
public void dumpProfile(JobletProfile jProfile) {
Map<String, Long> counters = jProfile.getCounters();
- for (Map.Entry<String, Counter> e : counterMap.entrySet()) {
- counters.put(e.getKey(), e.getValue().get());
- }
+ counterMap.forEach((key, value) -> counters.put(key, value.get()));
for (Task task : taskMap.values()) {
TaskProfile taskProfile = new TaskProfile(task.getTaskAttemptId(),
new Hashtable<>(task.getPartitionSendProfile()), new StatsCollector());
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
index c58a2fa..11148a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
@@ -35,9 +35,9 @@
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import org.apache.hyracks.util.IntSerDeUtils;
import org.apache.hyracks.dataflow.std.sort.Utility;
import org.apache.hyracks.dataflow.std.structures.TuplePointer;
+import org.apache.hyracks.util.IntSerDeUtils;
public abstract class AbstractTupleMemoryManagerTest {
ISerializerDeserializer[] fieldsSerDer = new ISerializerDeserializer[] {
@@ -51,13 +51,13 @@
protected void assertEachTupleInFTAIsInBuffer(Map<Integer, Integer> map, Map<TuplePointer, Integer> mapInserted) {
ITuplePointerAccessor accessor = getTuplePointerAccessor();
- for (Map.Entry<TuplePointer, Integer> entry : mapInserted.entrySet()) {
- accessor.reset(entry.getKey());
- int dataLength = map.get(entry.getValue());
- assertEquals((int) entry.getValue(),
+ mapInserted.forEach((key, value) -> {
+ accessor.reset(key);
+ int dataLength = map.get(value);
+ assertEquals((int) value,
IntSerDeUtils.getInt(accessor.getBuffer().array(), accessor.getAbsFieldStartOffset(0)));
assertEquals(dataLength, accessor.getTupleLength());
- }
+ });
assertEquals(map.size(), mapInserted.size());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
index 521dff1..b0c210f 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
@@ -130,15 +130,15 @@
@Override
public void open() throws HyracksDataException {
- for (Map.Entry<Integer, String> keyValue : keyValueMap.entrySet()) {
- Result result = answer.get(keyValue.getValue());
+ keyValueMap.forEach((key, value) -> {
+ Result result = answer.get(value);
if (result == null) {
- answer.put(keyValue.getValue(), new Result(keyValue.getKey()));
+ answer.put(value, new Result(key));
} else {
- result.sum += keyValue.getKey();
+ result.sum += key;
result.count++;
}
- }
+ });
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
index 15d11b0..4937a15 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
@@ -20,7 +20,6 @@
import java.net.InetAddress;
import java.util.ArrayList;
-import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -30,7 +29,6 @@
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapred.InputSplit;
-
import org.apache.hyracks.api.client.NodeControllerInfo;
import org.apache.hyracks.api.topology.ClusterTopology;
import org.apache.hyracks.hdfs.api.INcCollection;
@@ -51,41 +49,32 @@
final int[] workloads, final int slotLimit) {
try {
final Map<List<Integer>, List<String>> pathToNCs = new HashMap<List<Integer>, List<String>>();
- for (int i = 0; i < NCs.length; i++) {
- List<Integer> path = new ArrayList<Integer>();
+ for (String NC : NCs) {
+ List<Integer> path = new ArrayList<>();
String ipAddress = InetAddress.getByAddress(
- ncNameToNcInfos.get(NCs[i]).getNetworkAddress().lookupIpAddress()).getHostAddress();
+ ncNameToNcInfos.get(NC).getNetworkAddress().lookupIpAddress()).getHostAddress();
topology.lookupNetworkTerminal(ipAddress, path);
- if (path.size() <= 0) {
+ if (path.isEmpty()) {
// if the hyracks nc is not in the defined cluster
path.add(Integer.MIN_VALUE);
- LOGGER.info(NCs[i] + "'s IP address is not in the cluster toplogy file!");
+ LOGGER.info(NC + "'s IP address is not in the cluster topology file!");
}
- List<String> ncs = pathToNCs.get(path);
- if (ncs == null) {
- ncs = new ArrayList<String>();
- pathToNCs.put(path, ncs);
- }
- ncs.add(NCs[i]);
+ List<String> ncs = pathToNCs.computeIfAbsent(path, k -> new ArrayList<>());
+ ncs.add(NC);
}
final TreeMap<List<Integer>, IntWritable> availableIpsToSlots = new TreeMap<List<Integer>, IntWritable>(
- new Comparator<List<Integer>>() {
-
- @Override
- public int compare(List<Integer> l1, List<Integer> l2) {
- int commonLength = Math.min(l1.size(), l2.size());
- for (int i = 0; i < commonLength; i++) {
- Integer value1 = l1.get(i);
- Integer value2 = l2.get(i);
- int cmp = value1 > value2 ? 1 : (value1 < value2 ? -1 : 0);
- if (cmp != 0) {
- return cmp;
- }
+ (l1, l2) -> {
+ int commonLength = Math.min(l1.size(), l2.size());
+ for (int i = 0; i < commonLength; i++) {
+ int value1 = l1.get(i);
+ int value2 = l2.get(i);
+ int cmp = Integer.compare(value1, value2);
+ if (cmp != 0) {
+ return cmp;
}
- return l1.size() > l2.size() ? 1 : (l1.size() < l2.size() ? -1 : 0);
}
-
+ return Integer.compare(l1.size(), l2.size());
});
for (int i = 0; i < workloads.length; i++) {
if (workloads[i] < slotLimit) {
@@ -93,7 +82,7 @@
String ipAddress = InetAddress.getByAddress(
ncNameToNcInfos.get(NCs[i]).getNetworkAddress().lookupIpAddress()).getHostAddress();
topology.lookupNetworkTerminal(ipAddress, path);
- if (path.size() <= 0) {
+ if (path.isEmpty()) {
// if the hyracks nc is not in the defined cluster
path.add(Integer.MIN_VALUE);
}
@@ -115,41 +104,38 @@
int minDistance = Integer.MAX_VALUE;
List<Integer> currentCandidatePath = null;
if (locs == null || locs.length > 0) {
- for (int j = 0; j < locs.length; j++) {
- /**
+ for (String loc : locs) {
+ /*
* get all the IP addresses from the name
*/
- InetAddress[] allIps = InetAddress.getAllByName(locs[j]);
+ InetAddress[] allIps = InetAddress.getAllByName(loc);
boolean inTopology = false;
for (InetAddress ip : allIps) {
- List<Integer> splitPath = new ArrayList<Integer>();
+ List<Integer> splitPath = new ArrayList<>();
boolean inCluster = topology.lookupNetworkTerminal(ip.getHostAddress(), splitPath);
if (!inCluster) {
continue;
}
inTopology = true;
- /**
+ /*
* if the node controller exists
*/
List<Integer> candidatePath = availableIpsToSlots.floorKey(splitPath);
if (candidatePath == null) {
candidatePath = availableIpsToSlots.ceilingKey(splitPath);
}
- if (candidatePath != null) {
- if (availableIpsToSlots.get(candidatePath).get() > 0) {
- int distance = distance(splitPath, candidatePath);
- if (minDistance > distance) {
- minDistance = distance;
- currentCandidatePath = candidatePath;
- }
+ if (candidatePath != null && availableIpsToSlots.get(candidatePath).get() > 0) {
+ int distance = distance(splitPath, candidatePath);
+ if (minDistance > distance) {
+ minDistance = distance;
+ currentCandidatePath = candidatePath;
}
-
}
}
if (!inTopology) {
- LOGGER.info(locs[j] + "'s IP address is not in the cluster toplogy file!");
- /**
+ LOGGER.info(loc + "'s IP address is not in the cluster topology file!");
+ /*
* if the machine is not in the topology file
*/
List<Integer> candidatePath = null;
@@ -159,11 +145,9 @@
break;
}
}
- /** the split path is empty */
- if (candidatePath != null) {
- if (availableIpsToSlots.get(candidatePath).get() > 0) {
- currentCandidatePath = candidatePath;
- }
+ /* the split path is empty */
+ if (candidatePath != null && availableIpsToSlots.get(candidatePath).get() > 0) {
+ currentCandidatePath = candidatePath;
}
}
}
@@ -176,8 +160,8 @@
}
}
- if (currentCandidatePath != null && currentCandidatePath.size() > 0) {
- /**
+ if (currentCandidatePath != null && !currentCandidatePath.isEmpty()) {
+ /*
* Update the entry of the selected IP
*/
IntWritable availableSlot = availableIpsToSlots.get(currentCandidatePath);
@@ -185,7 +169,7 @@
if (availableSlot.get() == 0) {
availableIpsToSlots.remove(currentCandidatePath);
}
- /**
+ /*
* Update the entry of the selected NC
*/
List<String> candidateNcs = pathToNCs.get(currentCandidatePath);
@@ -196,7 +180,7 @@
}
}
}
- /** not scheduled */
+ /* not scheduled */
return null;
} catch (Exception e) {
throw new IllegalStateException(e);
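Note: the TreeMap comparator above shrinks an anonymous Comparator class to a lambda and swaps hand-written comparisons for Integer.compare, which also avoids accidental Integer reference comparison. A self-contained sketch of the same ordering (lexicographic on the elements, shorter list first on a tie):

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.TreeMap;

public class ComparatorSketch {
    static final Comparator<List<Integer>> PATH_ORDER = (l1, l2) -> {
        int common = Math.min(l1.size(), l2.size());
        for (int i = 0; i < common; i++) {
            int cmp = Integer.compare(l1.get(i), l2.get(i));
            if (cmp != 0) {
                return cmp;
            }
        }
        return Integer.compare(l1.size(), l2.size());
    };

    public static void main(String[] args) {
        TreeMap<List<Integer>, String> paths = new TreeMap<>(PATH_ORDER);
        paths.put(Arrays.asList(1, 2), "deep");
        paths.put(Arrays.asList(1), "shallow");
        System.out.println(paths.firstKey()); // [1]
    }
}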
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
index 2d1c87a..f9b68bc 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
@@ -379,23 +379,19 @@
ncNameToIndex.clear();
int i = 0;
- /**
+ /*
* build the IP address to NC map
*/
for (Map.Entry<String, NodeControllerInfo> entry : ncNameToNcInfos.entrySet()) {
String ipAddr = InetAddress.getByAddress(entry.getValue().getNetworkAddress().lookupIpAddress())
.getHostAddress();
- List<String> matchedNCs = ipToNcMapping.get(ipAddr);
- if (matchedNCs == null) {
- matchedNCs = new ArrayList<String>();
- ipToNcMapping.put(ipAddr, matchedNCs);
- }
+ List<String> matchedNCs = ipToNcMapping.computeIfAbsent(ipAddr, k -> new ArrayList<>());
matchedNCs.add(entry.getKey());
NCs[i] = entry.getKey();
i++;
}
- /**
+ /*
* set up the NC name to index mapping
*/
for (i = 0; i < NCs.length; i++) {
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
index 75c5bed..6d8f9cf 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
@@ -123,8 +123,7 @@
protected void addDependenciesToLicenseMap() throws ProjectBuildingException {
Map<MavenProject, List<Pair<String, String>>> dependencyLicenseMap = gatherDependencies();
- for (Map.Entry<MavenProject, List<Pair<String, String>>> dep : dependencyLicenseMap.entrySet()) {
- final MavenProject depProject = dep.getKey();
+ dependencyLicenseMap.forEach((depProject, value) -> {
Set<String> locations = dependencySets.isEmpty() ? Collections.singleton(location)
: getIncludedLocation(depProject.getArtifact());
if (isExcluded(depProject.getArtifact())) {
@@ -133,10 +132,10 @@
getLog().debug("skipping " + depProject + " [not included in dependency sets]");
} else {
for (String depLocation : locations) {
- addDependencyToLicenseMap(depProject, dep.getValue(), depLocation);
+ addDependencyToLicenseMap(depProject, value, depLocation);
}
}
- }
+ });
}
private int getLicenseMetric(String url) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
index 6304a9a..fcea8e0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
@@ -801,19 +801,19 @@
}
synchronized (fileInfoMap) {
- for (Map.Entry<Integer, BufferedFileHandle> entry : fileInfoMap.entrySet()) {
+ fileInfoMap.forEach((key, value) -> {
try {
- boolean fileHasBeenDeleted = entry.getValue().fileHasBeenDeleted();
- sweepAndFlush(entry.getKey(), !fileHasBeenDeleted);
+ boolean fileHasBeenDeleted = value.fileHasBeenDeleted();
+ sweepAndFlush(key, !fileHasBeenDeleted);
if (!fileHasBeenDeleted) {
- ioManager.close(entry.getValue().getFileHandle());
+ ioManager.close(value.getFileHandle());
}
} catch (HyracksDataException e) {
if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "Error flushing file id: " + entry.getKey(), e);
+ LOGGER.log(Level.WARNING, "Error flushing file id: " + key, e);
}
}
- }
+ });
fileInfoMap.clear();
}
}
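Note: Map.forEach walks the same entries an explicit entrySet loop would, so an external locking discipline, like the synchronized (fileInfoMap) block above, carries over unchanged. Hedged sketch of the locking shape only:

import java.util.HashMap;
import java.util.Map;

public class SynchronizedForEachSketch {
    private final Map<Integer, String> fileInfoMap = new HashMap<>();

    void closeAll() {
        synchronized (fileInfoMap) { // same lock every writer takes
            fileInfoMap.forEach((fileId, handle) -> System.out.println("closing " + fileId));
            fileInfoMap.clear();
        }
    }
}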