ASTERIXDB-1714: Eliminate dependency on org.json
Change-Id: Ie9c5400fd134ae75d43385255af7794e968b1c7e
Reviewed-on: https://asterix-gerrit.ics.uci.edu/1392
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
BAD: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Till Westmann <tillw@apache.org>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml b/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml
index 17dd1ea..3bd1316 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/pom.xml
@@ -68,14 +68,13 @@
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>20090211</version>
- </dependency>
- <dependency>
<groupId>org.apache.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
index 0b7cb7e..1123c5e 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/AlgebricksMetaOperatorDescriptor.java
@@ -19,7 +19,10 @@
package org.apache.hyracks.algebricks.runtime.operators.meta;
import java.nio.ByteBuffer;
+import java.util.Arrays;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.algebricks.runtime.base.AlgebricksPipeline;
import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
import org.apache.hyracks.api.comm.IFrameWriter;
@@ -32,8 +35,6 @@
import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
import org.apache.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-import org.json.JSONException;
-import org.json.JSONObject;
public class AlgebricksMetaOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
@@ -56,9 +57,9 @@
}
@Override
- public JSONObject toJSON() throws JSONException {
- JSONObject json = super.toJSON();
- json.put("micro-operators", pipeline.getRuntimeFactories());
+ public ObjectNode toJSON() {
+ ObjectNode json = super.toJSON();
+ json.put("micro-operators", Arrays.toString(pipeline.getRuntimeFactories()));
return json;
}
@@ -70,10 +71,6 @@
sb.append(" " + f.toString() + ";\n");
}
sb.append("}");
- // sb.append(super.getInputArity());
- // sb.append(";");
- // sb.append(super.getOutputArity());
- // sb.append(";");
return sb.toString();
}
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
index f49189d..7c42c75 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
@@ -154,16 +154,15 @@
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>20090211</version>
- </dependency>
- <dependency>
<groupId>org.apache.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ </dependency>
+ <dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
diff --git a/hyracks-fullstack/hyracks/hyracks-api/pom.xml b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
index e624be6..c872bea 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
@@ -58,13 +58,6 @@
</build>
<dependencies>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>20090211</version>
- <type>jar</type>
- <scope>compile</scope>
- </dependency>
- <dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5.2</version>
@@ -97,5 +90,10 @@
<version>2.0.2-beta</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ <version>2.8.1</version>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
index 9a2a658..7dd5fe9 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
@@ -28,7 +28,6 @@
import java.util.logging.Logger;
import org.apache.commons.lang3.tuple.Pair;
-import org.json.JSONException;
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.IActivity;
@@ -150,12 +149,7 @@
acg.addActivityClusters(acList);
if (LOGGER.isLoggable(Level.FINE)) {
- try {
- LOGGER.fine(acg.toJSON().toString(2));
- } catch (JSONException e) {
- e.printStackTrace();
- throw new RuntimeException(e);
- }
+ LOGGER.fine(acg.toJSON().toString());
}
return acg;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/IConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/IConnectorDescriptor.java
index 7219040..abff2f7 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/IConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/IConnectorDescriptor.java
@@ -21,6 +21,7 @@
import java.io.Serializable;
import java.util.BitSet;
+import com.fasterxml.jackson.databind.JsonNode;
import org.apache.hyracks.api.application.ICCApplicationContext;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.comm.IPartitionCollector;
@@ -30,8 +31,6 @@
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.ActivityCluster;
-import org.json.JSONException;
-import org.json.JSONObject;
/**
* Connector that connects operators in a Job.
@@ -133,9 +132,8 @@
* Translate this connector descriptor to JSON.
*
* @return
- * @throws JSONException
*/
- public JSONObject toJSON() throws JSONException;
+ public JsonNode toJSON();
/**
* Sets the connector Id
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/IOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/IOperatorDescriptor.java
index 26561e6..b02c4f2 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/IOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/IOperatorDescriptor.java
@@ -20,11 +20,11 @@
import java.io.Serializable;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.application.ICCApplicationContext;
import org.apache.hyracks.api.constraints.IConstraintAcceptor;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.json.JSONException;
-import org.json.JSONObject;
/**
* Descriptor for operators in Hyracks.
@@ -99,5 +99,5 @@
/**
* Translates this operator descriptor to JSON.
*/
- public JSONObject toJSON() throws JSONException;
+ public ObjectNode toJSON();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java
index 391b637..23ae97b 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java
@@ -18,14 +18,14 @@
*/
package org.apache.hyracks.api.dataflow.value;
-import org.json.JSONException;
-import org.json.JSONObject;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
public interface JSONSerializable {
/**
* Returns the JSON representation of the object.
*
- * @return A new JSONObject instance representing this Java object.
+ * @return A new ObjectNode instance representing this Java object.
*/
- public JSONObject toJSON() throws JSONException;
+ public ObjectNode toJSON();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java
index cefdd7d..e5fad32 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java
@@ -25,10 +25,11 @@
import java.util.Map;
import java.util.Set;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang3.tuple.Pair;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
@@ -186,50 +187,50 @@
vList.set(index, value);
}
- public JSONObject toJSON() throws JSONException {
- JSONObject jac = new JSONObject();
-
- JSONArray jans = new JSONArray();
+ public JsonNode toJSON() {
+ ObjectMapper om = new ObjectMapper();
+ ArrayNode jans = om.createArrayNode();
+ ObjectNode jac = om.createObjectNode();
for (IActivity an : activities.values()) {
- JSONObject jan = new JSONObject();
+ ObjectNode jan = om.createObjectNode();
jan.put("id", an.getActivityId().toString());
jan.put("java-class", an.getClass().getName());
List<IConnectorDescriptor> inputs = activityInputMap.get(an.getActivityId());
if (inputs != null) {
- JSONArray jInputs = new JSONArray();
+ ArrayNode jInputs = om.createArrayNode();
for (int i = 0; i < inputs.size(); ++i) {
- JSONObject jInput = new JSONObject();
+ ObjectNode jInput = om.createObjectNode();
jInput.put("input-port", i);
jInput.put("connector-id", inputs.get(i).getConnectorId().toString());
- jInputs.put(jInput);
+ jInputs.add(jInput);
}
- jan.put("inputs", jInputs);
+ jan.set("inputs", jInputs);
}
List<IConnectorDescriptor> outputs = activityOutputMap.get(an.getActivityId());
if (outputs != null) {
- JSONArray jOutputs = new JSONArray();
+ ArrayNode jOutputs = om.createArrayNode();
for (int i = 0; i < outputs.size(); ++i) {
- JSONObject jOutput = new JSONObject();
+ ObjectNode jOutput = om.createObjectNode();
jOutput.put("output-port", i);
jOutput.put("connector-id", outputs.get(i).getConnectorId().toString());
- jOutputs.put(jOutput);
+ jOutputs.add(jOutput);
}
- jan.put("outputs", jOutputs);
+ jan.set("outputs", jOutputs);
}
Set<ActivityId> blockers = getBlocked2BlockerMap().get(an.getActivityId());
if (blockers != null) {
- JSONArray jDeps = new JSONArray();
+ ArrayNode jDeps = om.createArrayNode();
for (ActivityId blocker : blockers) {
- jDeps.put(blocker.toString());
+ jDeps.add(blocker.toString());
}
- jan.put("depends-on", jDeps);
+ jan.set("depends-on", jDeps);
}
- jans.put(jan);
+ jans.add(jan);
}
- jac.put("activities", jans);
+ jac.set("activities", jans);
return jac;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java
index 7df619d..b64e2d5 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java
@@ -24,10 +24,9 @@
import java.util.List;
import java.util.Map;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
@@ -162,15 +161,15 @@
return ac.getProducerActivity(cid);
}
- public JSONObject toJSON() throws JSONException {
- JSONObject acgj = new JSONObject();
-
- JSONArray acl = new JSONArray();
+ public ObjectNode toJSON() {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode acgj = om.createObjectNode();
+ ArrayNode acl = om.createArrayNode();
for (ActivityCluster ac : activityClusterMap.values()) {
- acl.put(ac.toJSON());
+ acl.add(ac.toJSON());
}
acgj.put("version", version);
- acgj.put("activity-clusters", acl);
+ acgj.set("activity-clusters", acl);
return acgj;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
index 7b44ff5..84a961e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSpecification.java
@@ -18,6 +18,7 @@
*/
package org.apache.hyracks.api.job;
+import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
@@ -27,6 +28,10 @@
import java.util.Map;
import java.util.Set;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.constraints.Constraint;
import org.apache.hyracks.api.constraints.expressions.ConstantExpression;
@@ -39,9 +44,6 @@
import org.apache.hyracks.api.dataflow.connectors.IConnectorPolicyAssignmentPolicy;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.dataset.ResultSetId;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
public class JobSpecification implements Serializable, IOperatorDescriptorRegistry, IConnectorDescriptorRegistry {
private static final long serialVersionUID = 1L;
@@ -343,16 +345,17 @@
}
@SuppressWarnings("incomplete-switch")
- public JSONObject toJSON() throws JSONException {
- JSONObject jjob = new JSONObject();
+ public ObjectNode toJSON() throws IOException {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode jjob = om.createObjectNode();
- JSONArray jopArray = new JSONArray();
+ ArrayNode jopArray = om.createArrayNode();
for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> e : opMap.entrySet()) {
- JSONObject op = e.getValue().toJSON();
+ ObjectNode op = e.getValue().toJSON();
if (!userConstraints.isEmpty()) {
// Add operator partition constraints to each JSON operator.
- JSONObject pcObject = new JSONObject();
- JSONObject pleObject = new JSONObject();
+ ObjectNode pcObject = om.createObjectNode();
+ ObjectNode pleObject = om.createObjectNode();
Iterator<Constraint> test = userConstraints.iterator();
while (test.hasNext()) {
Constraint constraint = test.next();
@@ -372,20 +375,20 @@
break;
}
}
- if (pleObject.length() > 0) {
- pcObject.put("location", pleObject);
+ if (pleObject.size() > 0) {
+ pcObject.set("location", pleObject);
}
- if (pcObject.length() > 0) {
- op.put("partition-constraints", pcObject);
+ if (pcObject.size() > 0) {
+ op.set("partition-constraints", pcObject);
}
}
- jopArray.put(op);
+ jopArray.add(op);
}
- jjob.put("operators", jopArray);
+ jjob.set("operators", jopArray);
- JSONArray jcArray = new JSONArray();
+ ArrayNode jcArray = om.createArrayNode();
for (Map.Entry<ConnectorDescriptorId, IConnectorDescriptor> e : connMap.entrySet()) {
- JSONObject conn = new JSONObject();
+ ObjectNode conn = om.createObjectNode();
Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connection = connectorOpMap
.get(e.getKey());
if (connection != null) {
@@ -394,10 +397,10 @@
conn.put("out-operator-id", connection.getRight().getLeft().getOperatorId().toString());
conn.put("out-operator-port", connection.getRight().getRight().intValue());
}
- conn.put("connector", e.getValue().toJSON());
- jcArray.put(conn);
+ conn.set("connector", e.getValue().toJSON());
+ jcArray.add(conn);
}
- jjob.put("connectors", jcArray);
+ jjob.set("connectors", jcArray);
return jjob;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-client/pom.xml b/hyracks-fullstack/hyracks/hyracks-client/pom.xml
index bf5d167..a532366 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-client/pom.xml
@@ -102,14 +102,13 @@
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>com.googlecode.json-simple</groupId>
- <artifactId>json-simple</artifactId>
- <version>1.1</version>
- </dependency>
- <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java
index 74b61b2..5d0865c 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java
@@ -23,16 +23,12 @@
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-import org.json.simple.parser.JSONParser;
-
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.job.profiling.counters.ICounter;
import org.apache.hyracks.client.stats.AggregateCounter;
import org.apache.hyracks.client.stats.Counters;
@@ -44,7 +40,8 @@
*/
public class ClientCounterContext implements IClusterCounterContext {
private static String[] RESET_COUNTERS = { Counters.NETWORK_IO_READ, Counters.NETWORK_IO_WRITE,
- Counters.MEMORY_USAGE, Counters.MEMORY_MAX, Counters.DISK_READ, Counters.DISK_WRITE, Counters.NUM_PROCESSOR };
+ Counters.MEMORY_USAGE, Counters.MEMORY_MAX, Counters.DISK_READ, Counters.DISK_WRITE,
+ Counters.NUM_PROCESSOR };
private static String[] AGG_COUNTERS = { Counters.SYSTEM_LOAD };
private static int UPDATE_INTERVAL = 10000;
@@ -118,8 +115,8 @@
public ICounter getCounter(String machineName, String counterName, boolean create) {
Counter counter = counterMap.get(machineName + "$" + counterName);
if (counter == null) {
- throw new IllegalStateException("request an unknown counter: " + counterName + " on slave machine "
- + machineName + "!");
+ throw new IllegalStateException(
+ "request an unknown counter: " + counterName + " on slave machine " + machineName + "!");
}
return counter;
}
@@ -132,11 +129,11 @@
reset();
for (String slave : slaveMachines) {
String slaveProfile = requestProfile(slave);
- JSONParser parser = new JSONParser();
- JSONObject jo = (JSONObject) parser.parse(slaveProfile);
- Object counterObject = jo.get("result");
- if (counterObject instanceof JSONObject) {
- updateCounterMapWithJSONArray(slave, (JSONObject) counterObject);
+ ObjectMapper parser = new ObjectMapper();
+ JsonNode jo = parser.readTree(slaveProfile);
+ JsonNode counterObject = jo.get("result");
+ if (counterObject.isObject()) {
+ updateCounterMapWithArrayNode(slave, counterObject);
}
}
} catch (Exception e) {
@@ -150,7 +147,7 @@
* @param jo
* the Profile JSON object
*/
- private void updateCounterMapWithJSONArray(String slave, JSONObject jo) {
+ private void updateCounterMapWithArrayNode(String slave, JsonNode jo) {
for (String counterName : RESET_COUNTERS) {
updateCounter(slave, jo, counterName);
}
@@ -160,8 +157,8 @@
}
}
- private void updateCounter(String slave, JSONObject jo, String counterName) {
- Object counterObject = jo.get(counterName);
+ private void updateCounter(String slave, JsonNode jo, String counterName) {
+ JsonNode counterObject = jo.get(counterName);
long counterValue = extractCounterValue(counterObject);
// global counter
ICounter counter = getCounter(counterName, true);
@@ -171,34 +168,30 @@
localCounter.set(counterValue);
}
- private long extractCounterValue(Object counterObject) {
+ private long extractCounterValue(JsonNode counterObject) {
long counterValue = 0;
if (counterObject == null) {
return counterValue;
- } else if (counterObject instanceof JSONArray) {
- JSONArray jArray = (JSONArray) counterObject;
- Object[] values = jArray.toArray();
+ } else if (counterObject.isArray()) {
/**
* use the last non-zero value as the counter value
*/
- for (Object value : values) {
- if (value instanceof Double) {
- Double dValue = (Double) value;
- double currentVal = dValue.doubleValue();
+ for (Iterator<JsonNode> jsonIt = counterObject.iterator(); jsonIt.hasNext();) {
+ JsonNode value = jsonIt.next();
+ if (value.isDouble()) {
+ double currentVal = value.asDouble();
if (currentVal != 0) {
counterValue = (long) currentVal;
}
- } else if (value instanceof Long) {
- Long lValue = (Long) value;
- long currentVal = lValue.longValue();
+ } else if (value.isLong()) {
+ long currentVal = value.asLong();
if (currentVal != 0) {
- counterValue = lValue.longValue();
+ counterValue = currentVal;
}
}
}
} else {
- Long val = (Long) counterObject;
- counterValue = val.longValue();
+ counterValue = counterObject.asLong();
}
return counterValue;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
index 350a7b3..636e1fe 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
@@ -102,14 +102,18 @@
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>20090211</version>
- </dependency>
- <dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.5</version>
</dependency>
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>18.0</version>
+ </dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java
index ca980b9..bf94dff 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java
@@ -25,6 +25,8 @@
import java.util.Map;
import java.util.Set;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.comm.NetworkAddress;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.control.common.base.INodeController;
@@ -33,8 +35,6 @@
import org.apache.hyracks.control.common.heartbeat.HeartbeatData;
import org.apache.hyracks.control.common.heartbeat.HeartbeatSchema;
import org.apache.hyracks.control.common.heartbeat.HeartbeatSchema.GarbageCollectorInfo;
-import org.json.JSONException;
-import org.json.JSONObject;
public class NodeControllerState {
private static final int RRD_SIZE = 720;
@@ -277,13 +277,13 @@
public NetworkAddress getMessagingPort() {
return messagingPort;
}
-
public int getNumCores() {
return numCores;
}
- public synchronized JSONObject toSummaryJSON() throws JSONException {
- JSONObject o = new JSONObject();
+ public synchronized ObjectNode toSummaryJSON() {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode o = om.createObjectNode();
o.put("node-id", ncConfig.nodeId);
o.put("heap-used", heapUsedSize[(rrdPtr + RRD_SIZE - 1) % RRD_SIZE]);
o.put("system-load-average", systemLoadAverage[(rrdPtr + RRD_SIZE - 1) % RRD_SIZE]);
@@ -291,8 +291,9 @@
return o;
}
- public synchronized JSONObject toDetailedJSON(boolean includeStats, boolean includeConfig) throws JSONException {
- JSONObject o = new JSONObject();
+ public synchronized ObjectNode toDetailedJSON(boolean includeStats, boolean includeConfig) {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode o = om.createObjectNode();
o.put("node-id", ncConfig.nodeId);
@@ -304,45 +305,45 @@
o.put("vm-name", vmName);
o.put("vm-version", vmVersion);
o.put("vm-vendor", vmVendor);
- o.put("classpath", classpath.split(File.pathSeparator));
- o.put("library-path", libraryPath.split(File.pathSeparator));
- o.put("boot-classpath", bootClasspath.split(File.pathSeparator));
- o.put("input-arguments", inputArguments);
- o.put("system-properties", systemProperties);
+ o.putPOJO("classpath", classpath.split(File.pathSeparator));
+ o.putPOJO("library-path", libraryPath.split(File.pathSeparator));
+ o.putPOJO("boot-classpath", bootClasspath.split(File.pathSeparator));
+ o.putPOJO("input-arguments", inputArguments);
+ o.putPOJO("system-properties", systemProperties);
o.put("pid", pid);
}
if (includeStats) {
- o.put("date", new Date());
+ o.putPOJO("date", new Date());
o.put("rrd-ptr", rrdPtr);
- o.put("heartbeat-times", hbTime);
- o.put("heap-init-sizes", heapInitSize);
- o.put("heap-used-sizes", heapUsedSize);
- o.put("heap-committed-sizes", heapCommittedSize);
- o.put("heap-max-sizes", heapMaxSize);
- o.put("nonheap-init-sizes", nonheapInitSize);
- o.put("nonheap-used-sizes", nonheapUsedSize);
- o.put("nonheap-committed-sizes", nonheapCommittedSize);
- o.put("nonheap-max-sizes", nonheapMaxSize);
- o.put("thread-counts", threadCount);
- o.put("peak-thread-counts", peakThreadCount);
- o.put("system-load-averages", systemLoadAverage);
- o.put("gc-names", gcNames);
- o.put("gc-collection-counts", gcCollectionCounts);
- o.put("gc-collection-times", gcCollectionTimes);
- o.put("net-payload-bytes-read", netPayloadBytesRead);
- o.put("net-payload-bytes-written", netPayloadBytesWritten);
- o.put("net-signaling-bytes-read", netSignalingBytesRead);
- o.put("net-signaling-bytes-written", netSignalingBytesWritten);
- o.put("dataset-net-payload-bytes-read", datasetNetPayloadBytesRead);
- o.put("dataset-net-payload-bytes-written", datasetNetPayloadBytesWritten);
- o.put("dataset-net-signaling-bytes-read", datasetNetSignalingBytesRead);
- o.put("dataset-net-signaling-bytes-written", datasetNetSignalingBytesWritten);
- o.put("ipc-messages-sent", ipcMessagesSent);
- o.put("ipc-message-bytes-sent", ipcMessageBytesSent);
- o.put("ipc-messages-received", ipcMessagesReceived);
- o.put("ipc-message-bytes-received", ipcMessageBytesReceived);
- o.put("disk-reads", diskReads);
- o.put("disk-writes", diskWrites);
+ o.putPOJO("heartbeat-times", hbTime);
+ o.putPOJO("heap-init-sizes", heapInitSize);
+ o.putPOJO("heap-used-sizes", heapUsedSize);
+ o.putPOJO("heap-committed-sizes", heapCommittedSize);
+ o.putPOJO("heap-max-sizes", heapMaxSize);
+ o.putPOJO("nonheap-init-sizes", nonheapInitSize);
+ o.putPOJO("nonheap-used-sizes", nonheapUsedSize);
+ o.putPOJO("nonheap-committed-sizes", nonheapCommittedSize);
+ o.putPOJO("nonheap-max-sizes", nonheapMaxSize);
+ o.putPOJO("thread-counts", threadCount);
+ o.putPOJO("peak-thread-counts", peakThreadCount);
+ o.putPOJO("system-load-averages", systemLoadAverage);
+ o.putPOJO("gc-names", gcNames);
+ o.putPOJO("gc-collection-counts", gcCollectionCounts);
+ o.putPOJO("gc-collection-times", gcCollectionTimes);
+ o.putPOJO("net-payload-bytes-read", netPayloadBytesRead);
+ o.putPOJO("net-payload-bytes-written", netPayloadBytesWritten);
+ o.putPOJO("net-signaling-bytes-read", netSignalingBytesRead);
+ o.putPOJO("net-signaling-bytes-written", netSignalingBytesWritten);
+ o.putPOJO("dataset-net-payload-bytes-read", datasetNetPayloadBytesRead);
+ o.putPOJO("dataset-net-payload-bytes-written", datasetNetPayloadBytesWritten);
+ o.putPOJO("dataset-net-signaling-bytes-read", datasetNetSignalingBytesRead);
+ o.putPOJO("dataset-net-signaling-bytes-written", datasetNetSignalingBytesWritten);
+ o.putPOJO("ipc-messages-sent", ipcMessagesSent);
+ o.putPOJO("ipc-message-bytes-sent", ipcMessageBytesSent);
+ o.putPOJO("ipc-messages-received", ipcMessagesReceived);
+ o.putPOJO("ipc-message-bytes-received", ipcMessageBytesReceived);
+ o.putPOJO("disk-reads", diskReads);
+ o.putPOJO("disk-writes", diskWrites);
}
return o;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/adminconsole/pages/IndexPage.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/adminconsole/pages/IndexPage.java
index 32ac3f5..680c2a7 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/adminconsole/pages/IndexPage.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/adminconsole/pages/IndexPage.java
@@ -18,22 +18,23 @@
*/
package org.apache.hyracks.control.cc.adminconsole.pages;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.collect.Lists;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.link.BookmarkablePageLink;
import org.apache.wicket.markup.html.list.ListItem;
import org.apache.wicket.markup.html.list.ListView;
import org.apache.wicket.request.mapper.parameter.PageParameters;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
import org.apache.hyracks.control.cc.ClusterControllerService;
-import org.apache.hyracks.control.cc.web.util.JSONUtils;
import org.apache.hyracks.control.cc.work.GetJobSummariesJSONWork;
import org.apache.hyracks.control.cc.work.GetNodeSummariesJSONWork;
import java.text.SimpleDateFormat;
import java.util.Date;
+import java.util.List;
public class IndexPage extends AbstractPage {
private static final long serialVersionUID = 1L;
@@ -43,49 +44,42 @@
GetNodeSummariesJSONWork gnse = new GetNodeSummariesJSONWork(ccs);
ccs.getWorkQueue().scheduleAndSync(gnse);
- JSONArray nodeSummaries = gnse.getSummaries();
- add(new Label("node-count", String.valueOf(nodeSummaries.length())));
- ListView<JSONObject> nodeList = new ListView<JSONObject>("node-list", JSONUtils.toList(nodeSummaries)) {
+ ArrayNode nodeSummaries = gnse.getSummaries();
+ add(new Label("node-count", String.valueOf(nodeSummaries.size())));
+ ListView<JsonNode> nodeList = new ListView<JsonNode>("node-list",
+ Lists.newArrayList(nodeSummaries.iterator())) {
private static final long serialVersionUID = 1L;
@Override
- protected void populateItem(ListItem<JSONObject> item) {
- JSONObject o = item.getModelObject();
- try {
- item.add(new Label("node-id", o.getString("node-id")));
- item.add(new Label("heap-used", o.getString("heap-used")));
- item.add(new Label("system-load-average", o.getString("system-load-average")));
- PageParameters params = new PageParameters();
- params.add("node-id", o.getString("node-id"));
- item.add(new BookmarkablePageLink<Object>("node-details", NodeDetailsPage.class, params));
- } catch (JSONException e) {
- throw new RuntimeException(e);
- }
+ protected void populateItem(ListItem<JsonNode> item) {
+ JsonNode o = item.getModelObject();
+ item.add(new Label("node-id", o.get("node-id").asText()));
+ item.add(new Label("heap-used", o.get("heap-used").asText()));
+ item.add(new Label("system-load-average", o.get("system-load-average").asText()));
+ PageParameters params = new PageParameters();
+ params.add("node-id", o.get("node-id").asText());
+ item.add(new BookmarkablePageLink<Object>("node-details", NodeDetailsPage.class, params));
}
};
add(nodeList);
GetJobSummariesJSONWork gjse = new GetJobSummariesJSONWork(ccs);
ccs.getWorkQueue().scheduleAndSync(gjse);
- JSONArray jobSummaries = gjse.getSummaries();
- ListView<JSONObject> jobList = new ListView<JSONObject>("jobs-list", JSONUtils.toList(jobSummaries)) {
+ ArrayNode jobSummaries = gjse.getSummaries();
+ ListView<JsonNode> jobList = new ListView<JsonNode>("jobs-list", Lists.newArrayList(jobSummaries.iterator())) {
private static final long serialVersionUID = 1L;
@Override
- protected void populateItem(ListItem<JSONObject> item) {
- JSONObject o = item.getModelObject();
- try {
- item.add(new Label("job-id", o.getString("job-id")));
- item.add(new Label("status", o.getString("status")));
- item.add(new Label("create-time", longToDateString(Long.parseLong(o.getString("create-time")))));
- item.add(new Label("start-time", longToDateString(Long.parseLong(o.getString("start-time")))));
- item.add(new Label("end-time", longToDateString(Long.parseLong(o.getString("end-time")))));
- PageParameters params = new PageParameters();
- params.add("job-id", o.getString("job-id"));
- item.add(new BookmarkablePageLink<Object>("job-details", JobDetailsPage.class, params));
- } catch (JSONException e) {
- throw new RuntimeException(e);
- }
+ protected void populateItem(ListItem<JsonNode> item) {
+ JsonNode o = item.getModelObject();
+ item.add(new Label("job-id", o.get("job-id").asText()));
+ item.add(new Label("status", o.get("status").asText()));
+ item.add(new Label("create-time", longToDateString(Long.parseLong(o.get("create-time").asText()))));
+ item.add(new Label("start-time", longToDateString(Long.parseLong(o.get("start-time").asText()))));
+ item.add(new Label("end-time", longToDateString(Long.parseLong(o.get("end-time").asText()))));
+ PageParameters params = new PageParameters();
+ params.add("job-id", o.get("job-id").asText());
+ item.add(new BookmarkablePageLink<Object>("job-details", JobDetailsPage.class, params));
}
};
add(jobList);
@@ -99,4 +93,4 @@
Date date = new Date(milliseconds);
return sdf.format(date);
}
-}
\ No newline at end of file
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/adminconsole/pages/JobDetailsPage.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/adminconsole/pages/JobDetailsPage.java
index 7898122..abc07d9 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/adminconsole/pages/JobDetailsPage.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/adminconsole/pages/JobDetailsPage.java
@@ -26,11 +26,12 @@
import java.util.List;
import java.util.Map;
+import com.fasterxml.jackson.databind.JsonNode;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.apache.wicket.util.string.StringValue;
-import org.json.JSONArray;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.TaskAttemptId;
@@ -58,19 +59,19 @@
jag.setEscapeModelStrings(false);
add(jag);
- JSONObject jagO = gacgw.getJSON();
+ ObjectNode jagO = gacgw.getJSON();
Map<ActivityId, String> activityMap = new HashMap<ActivityId, String>();
if (jagO.has("activity-clusters")) {
- JSONArray acArray = jagO.getJSONArray("activity-clusters");
- for (int j = 0; j < acArray.length(); ++j) {
- JSONObject acO = acArray.getJSONObject(j);
+ JsonNode acArray = jagO.get("activity-clusters");
+ for (int j = 0; j < acArray.size(); ++j) {
+ JsonNode acO = acArray.get(j);
if (acO.has("activities")) {
- JSONArray aArray = acO.getJSONArray("activities");
- for (int i = 0; i < aArray.length(); ++i) {
- JSONObject aO = aArray.getJSONObject(i);
- ActivityId aid = ActivityId.parse(aO.getString("id"));
- String className = aO.getString("java-class");
+ JsonNode aArray = acO.get("activities");
+ for (int i = 0; i < aArray.size(); ++i) {
+ JsonNode aO = aArray.get(i);
+ ActivityId aid = ActivityId.parse(aO.get("id").asText());
+ String className = aO.get("java-class").asText();
activityMap.put(aid, className);
}
@@ -84,30 +85,30 @@
jobrun.setEscapeModelStrings(false);
add(jobrun);
- JSONObject jrO = gjrw.getJSON();
+ ObjectNode jrO = gjrw.getJSON();
List<TaskClusterAttempt[]> tcList = new ArrayList<TaskClusterAttempt[]>();
long minTime = Long.MAX_VALUE;
long maxTime = Long.MIN_VALUE;
if (jrO.has("activity-clusters")) {
- JSONArray acA = jrO.getJSONArray("activity-clusters");
- for (int i = 0; i < acA.length(); ++i) {
- JSONObject acO = acA.getJSONObject(i);
+ JsonNode acA = jrO.get("activity-clusters");
+ for (int i = 0; i < acA.size(); ++i) {
+ JsonNode acO = acA.get(i);
if (acO.has("plan")) {
- JSONObject planO = acO.getJSONObject("plan");
+ JsonNode planO = acO.get("plan");
if (planO.has("task-clusters")) {
- JSONArray tcA = planO.getJSONArray("task-clusters");
- for (int j = 0; j < tcA.length(); ++j) {
- JSONObject tcO = tcA.getJSONObject(j);
- String tcId = tcO.getString("task-cluster-id");
+ JsonNode tcA = planO.get("task-clusters");
+ for (int j = 0; j < tcA.size(); ++j) {
+ JsonNode tcO = tcA.get(j);
+ String tcId = tcO.get("task-cluster-id").asText();
if (tcO.has("attempts")) {
- JSONArray tcaA = tcO.getJSONArray("attempts");
- TaskClusterAttempt[] tcAttempts = new TaskClusterAttempt[tcaA.length()];
- for (int k = 0; k < tcaA.length(); ++k) {
- JSONObject tcaO = tcaA.getJSONObject(k);
- int attempt = tcaO.getInt("attempt");
- long startTime = tcaO.getLong("start-time");
- long endTime = tcaO.getLong("end-time");
+ JsonNode tcaA = tcO.get("attempts");
+ TaskClusterAttempt[] tcAttempts = new TaskClusterAttempt[tcaA.size()];
+ for (int k = 0; k < tcaA.size(); ++k) {
+ JsonNode tcaO = tcaA.get(k);
+ int attempt = tcaO.get("attempt").asInt();
+ long startTime = tcaO.get("start-time").asLong();
+ long endTime = tcaO.get("end-time").asLong();
tcAttempts[k] = new TaskClusterAttempt(tcId, attempt, startTime, endTime);
if (startTime < minTime) {
@@ -117,13 +118,13 @@
maxTime = endTime;
}
if (tcaO.has("task-attempts")) {
- JSONArray taArray = tcaO.getJSONArray("task-attempts");
- tcAttempts[k].tasks = new TaskAttempt[taArray.length()];
- for (int l = 0; l < taArray.length(); ++l) {
- JSONObject taO = taArray.getJSONObject(l);
- TaskAttemptId taId = TaskAttemptId.parse(taO.getString("task-attempt-id"));
- TaskAttempt ta = new TaskAttempt(taId, taO.getLong("start-time"),
- taO.getLong("end-time"));
+ JsonNode taArray = tcaO.get("task-attempts");
+ tcAttempts[k].tasks = new TaskAttempt[taArray.size()];
+ for (int l = 0; l < taArray.size(); ++l) {
+ JsonNode taO = taArray.get(l);
+ TaskAttemptId taId = TaskAttemptId.parse(taO.get("task-attempt-id").asText());
+ TaskAttempt ta = new TaskAttempt(taId, taO.get("start-time").asLong(),
+ taO.get("end-time").asLong());
tcAttempts[k].tasks[l] = ta;
TaskId tid = taId.getTaskId();
ta.name = activityMap.get(tid.getActivityId());
@@ -154,31 +155,31 @@
Map<TaskAttemptId, TaskProfile> tpMap = new HashMap<TaskAttemptId, TaskProfile>();
if (jrO.has("profile")) {
- JSONObject pO = jrO.getJSONObject("profile");
+ JsonNode pO = jrO.get("profile");
if (pO.has("joblets")) {
- JSONArray jobletsA = pO.getJSONArray("joblets");
- for (int i = 0; i < jobletsA.length(); ++i) {
- JSONObject jobletO = jobletsA.getJSONObject(i);
+ JsonNode jobletsA = pO.get("joblets");
+ for (int i = 0; i < jobletsA.size(); ++i) {
+ JsonNode jobletO = jobletsA.get(i);
if (jobletO.has("tasks")) {
- JSONArray tasksA = jobletO.getJSONArray("tasks");
- for (int j = 0; j < tasksA.length(); ++j) {
- JSONObject taskO = tasksA.getJSONObject(j);
- ActivityId activityId = ActivityId.parse(taskO.getString("activity-id"));
- int partition = taskO.getInt("partition");
- int attempt = taskO.getInt("attempt");
+ JsonNode tasksA = jobletO.get("tasks");
+ for (int j = 0; j < tasksA.size(); ++j) {
+ JsonNode taskO = tasksA.get(j);
+ ActivityId activityId = ActivityId.parse(taskO.get("activity-id").asText());
+ int partition = taskO.get("partition").asInt();
+ int attempt = taskO.get("attempt").asInt();
TaskAttemptId taId = new TaskAttemptId(new TaskId(activityId, partition), attempt);
if (taskO.has("partition-send-profile")) {
- JSONArray taskProfilesA = taskO.getJSONArray("partition-send-profile");
- for (int k = 0; k < taskProfilesA.length(); ++k) {
- JSONObject ppO = taskProfilesA.getJSONObject(k);
- long openTime = ppO.getLong("open-time");
- long closeTime = ppO.getLong("close-time");
- int resolution = ppO.getInt("resolution");
- long offset = ppO.getLong("offset");
- JSONArray frameTimesA = ppO.getJSONArray("frame-times");
- long[] frameTimes = new long[frameTimesA.length()];
+ JsonNode taskProfilesA = taskO.get("partition-send-profile");
+ for (int k = 0; k < taskProfilesA.size(); ++k) {
+ JsonNode ppO = taskProfilesA.get(k);
+ long openTime = ppO.get("open-time").asLong();
+ long closeTime = ppO.get("close-time").asLong();
+ int resolution = ppO.get("resolution").asInt();
+ long offset = ppO.get("offset").asLong();
+ JsonNode frameTimesA = ppO.get("frame-times");
+ long[] frameTimes = new long[frameTimesA.size()];
for (int l = 0; l < frameTimes.length; ++l) {
- frameTimes[l] = frameTimesA.getInt(l) + offset;
+ frameTimes[l] = frameTimesA.get(l).asLong() + offset;
}
TaskProfile tp = new TaskProfile(taId, openTime, closeTime, frameTimes, resolution);
if (!tpMap.containsKey(tp.taId)) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
index dbe4202..f1d04bb 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
@@ -28,10 +28,9 @@
import java.util.Map;
import java.util.Set;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
@@ -237,131 +236,132 @@
return connectorPolicyMap;
}
- public JSONObject toJSON() throws JSONException {
- JSONObject result = new JSONObject();
+ public ObjectNode toJSON() {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode result = om.createObjectNode();
result.put("job-id", jobId.toString());
- result.put("status", getStatus());
+ result.putPOJO("status", getStatus());
result.put("create-time", getCreateTime());
result.put("start-time", getStartTime());
result.put("end-time", getEndTime());
- JSONArray aClusters = new JSONArray();
+ ArrayNode aClusters = om.createArrayNode();
for (ActivityCluster ac : acg.getActivityClusterMap().values()) {
- JSONObject acJSON = new JSONObject();
+ ObjectNode acJSON = om.createObjectNode();
acJSON.put("activity-cluster-id", String.valueOf(ac.getId()));
- JSONArray activitiesJSON = new JSONArray();
+ ArrayNode activitiesJSON = om.createArrayNode();
for (ActivityId aid : ac.getActivityMap().keySet()) {
- activitiesJSON.put(aid);
+ activitiesJSON.addPOJO(aid);
}
- acJSON.put("activities", activitiesJSON);
+ acJSON.set("activities", activitiesJSON);
- JSONArray dependenciesJSON = new JSONArray();
+ ArrayNode dependenciesJSON = om.createArrayNode();
for (ActivityCluster dependency : ac.getDependencies()) {
- dependenciesJSON.put(String.valueOf(dependency.getId()));
+ dependenciesJSON.add(String.valueOf(dependency.getId()));
}
- acJSON.put("dependencies", dependenciesJSON);
+ acJSON.set("dependencies", dependenciesJSON);
ActivityClusterPlan acp = activityClusterPlanMap.get(ac.getId());
if (acp == null) {
- acJSON.put("plan", (Object) null);
+ acJSON.putNull("plan");
} else {
- JSONObject planJSON = new JSONObject();
+ ObjectNode planJSON = om.createObjectNode();
- JSONArray acTasks = new JSONArray();
+ ArrayNode acTasks = om.createArrayNode();
for (Map.Entry<ActivityId, ActivityPlan> e : acp.getActivityPlanMap().entrySet()) {
ActivityPlan acPlan = e.getValue();
- JSONObject entry = new JSONObject();
+ ObjectNode entry = om.createObjectNode();
entry.put("activity-id", e.getKey().toString());
ActivityPartitionDetails apd = acPlan.getActivityPartitionDetails();
entry.put("partition-count", apd.getPartitionCount());
- JSONArray inPartCountsJSON = new JSONArray();
+ ArrayNode inPartCountsJSON = om.createArrayNode();
int[] inPartCounts = apd.getInputPartitionCounts();
if (inPartCounts != null) {
for (int i : inPartCounts) {
- inPartCountsJSON.put(i);
+ inPartCountsJSON.add(i);
}
}
- entry.put("input-partition-counts", inPartCountsJSON);
+ entry.set("input-partition-counts", inPartCountsJSON);
- JSONArray outPartCountsJSON = new JSONArray();
+ ArrayNode outPartCountsJSON = om.createArrayNode();
int[] outPartCounts = apd.getOutputPartitionCounts();
if (outPartCounts != null) {
for (int o : outPartCounts) {
- outPartCountsJSON.put(o);
+ outPartCountsJSON.add(o);
}
}
- entry.put("output-partition-counts", outPartCountsJSON);
+ entry.set("output-partition-counts", outPartCountsJSON);
- JSONArray tasks = new JSONArray();
+ ArrayNode tasks = om.createArrayNode();
for (Task t : acPlan.getTasks()) {
- JSONObject task = new JSONObject();
+ ObjectNode task = om.createObjectNode();
task.put("task-id", t.getTaskId().toString());
- JSONArray dependentTasksJSON = new JSONArray();
+ ArrayNode dependentTasksJSON = om.createArrayNode();
for (TaskId dependent : t.getDependents()) {
- dependentTasksJSON.put(dependent.toString());
- }
- task.put("dependents", dependentTasksJSON);
+ dependentTasksJSON.add(dependent.toString());
+ }
+ task.set("dependents", dependentTasksJSON);
- JSONArray dependencyTasksJSON = new JSONArray();
+ ArrayNode dependencyTasksJSON = om.createArrayNode();
for (TaskId dependency : t.getDependencies()) {
- dependencyTasksJSON.put(dependency.toString());
+ dependencyTasksJSON.add(dependency.toString());
}
- task.put("dependencies", dependencyTasksJSON);
+ task.set("dependencies", dependencyTasksJSON);
- tasks.put(task);
+ tasks.add(task);
}
- entry.put("tasks", tasks);
+ entry.set("tasks", tasks);
- acTasks.put(entry);
+ acTasks.add(entry);
}
- planJSON.put("activities", acTasks);
+ planJSON.set("activities", acTasks);
- JSONArray tClusters = new JSONArray();
+ ArrayNode tClusters = om.createArrayNode();
for (TaskCluster tc : acp.getTaskClusters()) {
- JSONObject c = new JSONObject();
+ ObjectNode c = om.createObjectNode();
c.put("task-cluster-id", String.valueOf(tc.getTaskClusterId()));
- JSONArray tasks = new JSONArray();
+ ArrayNode tasksAry = om.createArrayNode();
for (Task t : tc.getTasks()) {
- tasks.put(t.getTaskId().toString());
+ tasksAry.add(t.getTaskId().toString());
}
- c.put("tasks", tasks);
+ c.set("tasks", tasksAry);
- JSONArray prodParts = new JSONArray();
+ ArrayNode prodParts = om.createArrayNode();
for (PartitionId p : tc.getProducedPartitions()) {
- prodParts.put(p.toString());
+ prodParts.add(p.toString());
}
- c.put("produced-partitions", prodParts);
+ c.set("produced-partitions", prodParts);
- JSONArray reqdParts = new JSONArray();
+ ArrayNode reqdParts = om.createArrayNode();
for (PartitionId p : tc.getRequiredPartitions()) {
- reqdParts.put(p.toString());
+ reqdParts.add(p.toString());
}
- c.put("required-partitions", reqdParts);
+ c.set("required-partitions", reqdParts);
- JSONArray attempts = new JSONArray();
+ ArrayNode attempts = om.createArrayNode();
List<TaskClusterAttempt> tcAttempts = tc.getAttempts();
if (tcAttempts != null) {
for (TaskClusterAttempt tca : tcAttempts) {
- JSONObject attempt = new JSONObject();
+ ObjectNode attempt = om.createObjectNode();
attempt.put("attempt", tca.getAttempt());
- attempt.put("status", tca.getStatus());
+ attempt.putPOJO("status", tca.getStatus());
attempt.put("start-time", tca.getStartTime());
attempt.put("end-time", tca.getEndTime());
- JSONArray taskAttempts = new JSONArray();
+ ArrayNode taskAttempts = om.createArrayNode();
for (TaskAttempt ta : tca.getTaskAttempts().values()) {
- JSONObject taskAttempt = new JSONObject();
- taskAttempt.put("task-id", ta.getTaskAttemptId().getTaskId());
- taskAttempt.put("task-attempt-id", ta.getTaskAttemptId());
- taskAttempt.put("status", ta.getStatus());
+ ObjectNode taskAttempt = om.createObjectNode();
+ taskAttempt.putPOJO("task-id", ta.getTaskAttemptId().getTaskId());
+ taskAttempt.putPOJO("task-attempt-id", ta.getTaskAttemptId());
+ taskAttempt.putPOJO("status", ta.getStatus());
taskAttempt.put("node-id", ta.getNodeId());
taskAttempt.put("start-time", ta.getStartTime());
taskAttempt.put("end-time", ta.getEndTime());
@@ -374,26 +374,26 @@
taskAttempt.put("failure-details", exceptionWriter.toString());
}
}
- taskAttempts.put(taskAttempt);
+ taskAttempts.add(taskAttempt);
}
- attempt.put("task-attempts", taskAttempts);
+ attempt.set("task-attempts", taskAttempts);
- attempts.put(attempt);
+ attempts.add(attempt);
}
}
- c.put("attempts", attempts);
+ c.set("attempts", attempts);
- tClusters.put(c);
+ tClusters.add(c);
}
- planJSON.put("task-clusters", tClusters);
+ planJSON.set("task-clusters", tClusters);
- acJSON.put("plan", planJSON);
+ acJSON.set("plan", planJSON);
}
- aClusters.put(acJSON);
+ aClusters.add(acJSON);
}
- result.put("activity-clusters", aClusters);
+ result.set("activity-clusters", aClusters);
- result.put("profile", profile.toJSON());
+ result.set("profile", profile.toJSON());
return result;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/JobScheduler.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/JobScheduler.java
index ab026eb..b577ff7 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/JobScheduler.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/scheduler/JobScheduler.java
@@ -31,9 +31,8 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.json.JSONException;
-import org.json.JSONObject;
-
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.comm.NetworkAddress;
import org.apache.hyracks.api.constraints.Constraint;
import org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression;
@@ -80,6 +79,7 @@
private final Set<TaskCluster> inProgressTaskClusters;
+
public JobScheduler(ClusterControllerService ccs, JobRun jobRun, Collection<Constraint> constraints) {
this.ccs = ccs;
this.jobRun = jobRun;
@@ -734,15 +734,12 @@
}
}
- private JSONObject createJobLogObject(final JobRun run) {
- JSONObject jobLogObject = new JSONObject();
- try {
- ActivityClusterGraph acg = run.getActivityClusterGraph();
- jobLogObject.put("activity-cluster-graph", acg.toJSON());
- jobLogObject.put("job-run", run.toJSON());
- } catch (JSONException e) {
- throw new RuntimeException(e);
- }
+ private ObjectNode createJobLogObject(final JobRun run) {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode jobLogObject = om.createObjectNode();
+ ActivityClusterGraph acg = run.getActivityClusterGraph();
+ jobLogObject.set("activity-cluster-graph", acg.toJSON());
+ jobLogObject.set("job-run", run.toJSON());
return jobLogObject;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/JobsRESTAPIFunction.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/JobsRESTAPIFunction.java
index bab77ca..e69884a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/JobsRESTAPIFunction.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/JobsRESTAPIFunction.java
@@ -18,7 +18,8 @@
*/
package org.apache.hyracks.control.cc.web;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.control.cc.ClusterControllerService;
@@ -35,8 +36,9 @@
}
@Override
- public JSONObject invoke(String[] arguments) throws Exception {
- JSONObject result = new JSONObject();
+ public ObjectNode invoke(String[] arguments) throws Exception {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode result = om.createObjectNode();
switch (arguments.length) {
case 1:
if (!"".equals(arguments[0])) {
@@ -45,7 +47,7 @@
case 0: {
GetJobSummariesJSONWork gjse = new GetJobSummariesJSONWork(ccs);
ccs.getWorkQueue().scheduleAndSync(gjse);
- result.put("result", gjse.getSummaries());
+ result.set("result", gjse.getSummaries());
break;
}
@@ -55,11 +57,11 @@
if ("job-activity-graph".equalsIgnoreCase(arguments[1])) {
GetActivityClusterGraphJSONWork gjage = new GetActivityClusterGraphJSONWork(ccs, jobId);
ccs.getWorkQueue().scheduleAndSync(gjage);
- result.put("result", gjage.getJSON());
+ result.set("result", gjage.getJSON());
} else if ("job-run".equalsIgnoreCase(arguments[1])) {
GetJobRunJSONWork gjre = new GetJobRunJSONWork(ccs, jobId);
ccs.getWorkQueue().scheduleAndSync(gjre);
- result.put("result", gjre.getJSON());
+ result.set("result", gjre.getJSON());
}
break;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/NodesRESTAPIFunction.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/NodesRESTAPIFunction.java
index 58deb55..8994895 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/NodesRESTAPIFunction.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/NodesRESTAPIFunction.java
@@ -18,7 +18,8 @@
*/
package org.apache.hyracks.control.cc.web;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.web.util.IJSONOutputFunction;
@@ -33,19 +34,20 @@
}
@Override
- public JSONObject invoke(String[] arguments) throws Exception {
- JSONObject result = new JSONObject();
+ public ObjectNode invoke(String[] arguments) throws Exception {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode result = om.createObjectNode();
switch (arguments.length) {
case 1: {
if ("".equals(arguments[0])) {
GetNodeSummariesJSONWork gnse = new GetNodeSummariesJSONWork(ccs);
ccs.getWorkQueue().scheduleAndSync(gnse);
- result.put("result", gnse.getSummaries());
+ result.set("result", gnse.getSummaries());
} else {
String nodeId = arguments[0];
GetNodeDetailsJSONWork gnde = new GetNodeDetailsJSONWork(ccs, nodeId, true, true);
ccs.getWorkQueue().scheduleAndSync(gnde);
- result.put("result", gnde.getDetail());
+ result.set("result", gnde.getDetail());
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
index 000cf46..0657f59 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/StateDumpRESTAPIFunction.java
@@ -20,7 +20,8 @@
import java.util.Map;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.web.util.IJSONOutputFunction;
@@ -35,13 +36,14 @@
}
@Override
- public JSONObject invoke(String[] arguments) throws Exception {
+ public ObjectNode invoke(String[] arguments) throws Exception {
GatherStateDumpsWork gsdw = new GatherStateDumpsWork(ccs);
ccs.getWorkQueue().scheduleAndSync(gsdw);
StateDumpRun sdr = gsdw.getStateDumpRun();
sdr.waitForCompletion();
- JSONObject result = new JSONObject();
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode result = om.createObjectNode();
for (Map.Entry<String, String> e : sdr.getStateDump().entrySet()) {
result.put(e.getKey(), e.getValue());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/IJSONOutputFunction.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/IJSONOutputFunction.java
index f55bf84..ab0a581 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/IJSONOutputFunction.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/IJSONOutputFunction.java
@@ -18,8 +18,8 @@
*/
package org.apache.hyracks.control.cc.web.util;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.node.ObjectNode;
public interface IJSONOutputFunction {
- public JSONObject invoke(String[] arguments) throws Exception;
+ public ObjectNode invoke(String[] arguments) throws Exception;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
index b00eeae..e736406 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestHandler.java
@@ -24,9 +24,10 @@
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.handler.AbstractHandler;
-import org.json.JSONObject;
public class JSONOutputRequestHandler extends AbstractHandler {
private final IJSONOutputFunction fn;
@@ -46,9 +47,10 @@
}
String[] parts = target.split("/");
try {
- JSONObject result = fn.invoke(parts);
+ ObjectNode result = fn.invoke(parts);
response.setContentType("application/json");
- result.write(response.getWriter());
+ ObjectMapper om = new ObjectMapper();
+ om.writer().writeValue(response.getWriter(), result);
baseRequest.setHandled(true);
} catch (Exception e) {
e.printStackTrace();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONUtils.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONUtils.java
deleted file mode 100644
index 5da4d9d..0000000
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONUtils.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hyracks.control.cc.web.util;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-public class JSONUtils {
- public static List<JSONObject> toList(JSONArray array) throws JSONException {
- List<JSONObject> list = new ArrayList<JSONObject>();
- for (int i = 0; i < array.length(); ++i) {
- list.add((JSONObject) array.get(i));
- }
- return list;
- }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetActivityClusterGraphJSONWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetActivityClusterGraphJSONWork.java
index 951e67e..294ae97 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetActivityClusterGraphJSONWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetActivityClusterGraphJSONWork.java
@@ -18,7 +18,8 @@
*/
package org.apache.hyracks.control.cc.work;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.control.cc.ClusterControllerService;
@@ -28,7 +29,7 @@
public class GetActivityClusterGraphJSONWork extends SynchronizableWork {
private final ClusterControllerService ccs;
private final JobId jobId;
- private JSONObject json;
+ private ObjectNode json;
public GetActivityClusterGraphJSONWork(ClusterControllerService ccs, JobId jobId) {
this.ccs = ccs;
@@ -37,18 +38,20 @@
@Override
protected void doRun() throws Exception {
+
+ ObjectMapper om = new ObjectMapper();
JobRun run = ccs.getActiveRunMap().get(jobId);
if (run == null) {
run = ccs.getRunMapArchive().get(jobId);
if (run == null) {
- json = new JSONObject();
+ json = om.createObjectNode();
return;
}
}
json = run.getActivityClusterGraph().toJSON();
}
- public JSONObject getJSON() {
+ public ObjectNode getJSON() {
return json;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobRunJSONWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobRunJSONWork.java
index f2119c3..aad6edf 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobRunJSONWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobRunJSONWork.java
@@ -18,7 +18,8 @@
*/
package org.apache.hyracks.control.cc.work;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.control.cc.ClusterControllerService;
@@ -28,7 +29,7 @@
public class GetJobRunJSONWork extends SynchronizableWork {
private final ClusterControllerService ccs;
private final JobId jobId;
- private JSONObject json;
+ private ObjectNode json;
public GetJobRunJSONWork(ClusterControllerService ccs, JobId jobId) {
this.ccs = ccs;
@@ -37,18 +38,19 @@
@Override
protected void doRun() throws Exception {
+ ObjectMapper om = new ObjectMapper();
JobRun run = ccs.getActiveRunMap().get(jobId);
if (run == null) {
run = ccs.getRunMapArchive().get(jobId);
if (run == null) {
- json = new JSONObject();
+ json = om.createObjectNode();
return;
}
}
json = run.toJSON();
}
- public JSONObject getJSON() {
+ public ObjectNode getJSON() {
return json;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java
index bc4468a..1e5a3a5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java
@@ -20,17 +20,16 @@
import java.util.Collection;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.job.JobRun;
import org.apache.hyracks.control.common.work.SynchronizableWork;
public class GetJobSummariesJSONWork extends SynchronizableWork {
private final ClusterControllerService ccs;
- private JSONArray summaries;
+ private ArrayNode summaries;
public GetJobSummariesJSONWork(ClusterControllerService ccs) {
this.ccs = ccs;
@@ -38,25 +37,27 @@
@Override
protected void doRun() throws Exception {
- summaries = new JSONArray();
+ ObjectMapper om = new ObjectMapper();
+ summaries = om.createArrayNode();
populateJSON(ccs.getActiveRunMap().values());
populateJSON(ccs.getRunMapArchive().values());
}
- private void populateJSON(Collection<JobRun> jobRuns) throws JSONException {
+ private void populateJSON(Collection<JobRun> jobRuns) {
+ ObjectMapper om = new ObjectMapper();
for (JobRun run : jobRuns) {
- JSONObject jo = new JSONObject();
+ ObjectNode jo = om.createObjectNode();
jo.put("type", "job-summary");
jo.put("job-id", run.getJobId().toString());
jo.put("create-time", run.getCreateTime());
jo.put("start-time", run.getStartTime());
jo.put("end-time", run.getEndTime());
jo.put("status", run.getStatus().toString());
- summaries.put(jo);
+ summaries.add(jo);
}
}
- public JSONArray getSummaries() {
+ public ArrayNode getSummaries() {
return summaries;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
index dab41ba..4a8dd1f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
@@ -35,13 +35,13 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.NodeControllerState;
import org.apache.hyracks.control.common.utils.PidHelper;
import org.apache.hyracks.control.common.work.IPCResponder;
import org.apache.hyracks.control.common.work.SynchronizableWork;
-import org.json.JSONException;
-import org.json.JSONObject;
import org.kohsuke.args4j.Option;
public class GetNodeDetailsJSONWork extends SynchronizableWork {
@@ -51,7 +51,8 @@
private final boolean includeStats;
private final boolean includeConfig;
private final IPCResponder<String> callback;
- private JSONObject detail;
+ private ObjectNode detail;
+ private ObjectMapper om = new ObjectMapper();
public GetNodeDetailsJSONWork(ClusterControllerService ccs, String nodeId, boolean includeStats,
boolean includeConfig, IPCResponder<String> callback) {
@@ -86,12 +87,12 @@
}
if (callback != null) {
- callback.setValue(detail == null ? null : detail.toString());
+ callback.setValue(detail == null ? null : om.writeValueAsString(detail));
}
}
- private JSONObject getCCDetails() throws JSONException {
- JSONObject o = new JSONObject();
+ private ObjectNode getCCDetails() {
+ ObjectNode o = om.createObjectNode();
MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
List<GarbageCollectorMXBean> gcMXBeans = ManagementFactory.getGarbageCollectorMXBeans();
ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean();
@@ -106,29 +107,29 @@
o.put("vm_name", runtimeMXBean.getVmName());
o.put("vm_version", runtimeMXBean.getVmVersion());
o.put("vm_vendor", runtimeMXBean.getVmVendor());
- o.put("classpath", runtimeMXBean.getClassPath().split(File.pathSeparator));
- o.put("library_path", runtimeMXBean.getLibraryPath().split(File.pathSeparator));
- o.put("boot_classpath", runtimeMXBean.getBootClassPath().split(File.pathSeparator));
- o.put("input_arguments", runtimeMXBean.getInputArguments());
- o.put("system_properties", runtimeMXBean.getSystemProperties());
+ o.putPOJO("classpath", runtimeMXBean.getClassPath().split(File.pathSeparator));
+ o.putPOJO("library_path", runtimeMXBean.getLibraryPath().split(File.pathSeparator));
+ o.putPOJO("boot_classpath", runtimeMXBean.getBootClassPath().split(File.pathSeparator));
+ o.putPOJO("input_arguments", runtimeMXBean.getInputArguments());
+ o.putPOJO("system_properties", runtimeMXBean.getSystemProperties());
o.put("pid", PidHelper.getPid());
}
if (includeStats) {
MemoryUsage heapUsage = memoryMXBean.getHeapMemoryUsage();
MemoryUsage nonheapUsage = memoryMXBean.getNonHeapMemoryUsage();
- List<JSONObject> gcs = new ArrayList<>();
+ List<ObjectNode> gcs = new ArrayList<>();
for (GarbageCollectorMXBean gcMXBean : gcMXBeans) {
- JSONObject gc = new JSONObject();
+ ObjectNode gc = om.createObjectNode();
gc.put("name", gcMXBean.getName());
gc.put("collection-time", gcMXBean.getCollectionTime());
gc.put("collection-count", gcMXBean.getCollectionCount());
gcs.add(gc);
}
- o.put("gcs", gcs);
+ o.putPOJO("gcs", gcs);
- o.put("date", new Date());
+ o.put("date", new Date().toString());
o.put("heap_init_size", heapUsage.getInit());
o.put("heap_used_size", heapUsage.getUsed());
o.put("heap_committed_size", heapUsage.getCommitted());
@@ -145,7 +146,7 @@
return o;
}
- private static void addIni(JSONObject o, Object configBean) throws JSONException {
+ private static void addIni(ObjectNode o, Object configBean) {
Map<String, Object> iniMap = new HashMap<>();
for (Field f : configBean.getClass().getFields()) {
Option option = f.getAnnotation(Option.class);
@@ -170,10 +171,10 @@
}
}
}
- o.put("ini", iniMap);
+ o.putPOJO("ini", iniMap);
}
- public JSONObject getDetail() {
+ public ObjectNode getDetail() {
return detail;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeSummariesJSONWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeSummariesJSONWork.java
index 4b81ad0..ebafd7d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeSummariesJSONWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeSummariesJSONWork.java
@@ -18,7 +18,8 @@
*/
package org.apache.hyracks.control.cc.work;
-import org.json.JSONArray;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.NodeControllerState;
@@ -26,7 +27,7 @@
public class GetNodeSummariesJSONWork extends SynchronizableWork {
private final ClusterControllerService ccs;
- private JSONArray summaries;
+ private ArrayNode summaries;
public GetNodeSummariesJSONWork(ClusterControllerService ccs) {
this.ccs = ccs;
@@ -34,13 +35,14 @@
@Override
protected void doRun() throws Exception {
- summaries = new JSONArray();
+ ObjectMapper om = new ObjectMapper();
+ summaries = om.createArrayNode();
for (NodeControllerState ncs : ccs.getNodeMap().values()) {
- summaries.put(ncs.toSummaryJSON());
+ summaries.add(ncs.toSummaryJSON());
}
}
- public JSONArray getSummaries() {
+ public ArrayNode getSummaries() {
return summaries;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java
index bd85463..2a383b6 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobCleanupWork.java
@@ -23,9 +23,8 @@
import java.util.Set;
import java.util.logging.Logger;
-import org.json.JSONException;
-import org.json.JSONObject;
-
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.api.job.ActivityClusterGraph;
import org.apache.hyracks.api.job.JobId;
@@ -123,15 +122,12 @@
}
}
- private JSONObject createJobLogObject(final JobRun run) {
- JSONObject jobLogObject = new JSONObject();
- try {
- ActivityClusterGraph acg = run.getActivityClusterGraph();
- jobLogObject.put("activity-cluster-graph", acg.toJSON());
- jobLogObject.put("job-run", run.toJSON());
- } catch (JSONException e) {
- throw new RuntimeException(e);
- }
+ private ObjectNode createJobLogObject(final JobRun run) {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode jobLogObject = om.createObjectNode();
+ ActivityClusterGraph acg = run.getActivityClusterGraph();
+ jobLogObject.set("activity-cluster-graph", acg.toJSON());
+ jobLogObject.set("job-run", run.toJSON());
return jobLogObject;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
index 9f7d9dd..1c8ae20 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
@@ -58,11 +58,6 @@
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>20090211</version>
- </dependency>
- <dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.4.5</version>
@@ -72,5 +67,9 @@
<artifactId>httpclient</artifactId>
<version>4.5.2</version>
</dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
index 525be8c..d1d33a5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
@@ -26,9 +26,9 @@
import java.util.Map;
import java.util.Map.Entry;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.io.IWritable;
@@ -45,17 +45,18 @@
return counters;
}
- public abstract JSONObject toJSON() throws JSONException;
+ public abstract ObjectNode toJSON() ;
- protected void populateCounters(JSONObject jo) throws JSONException {
- JSONArray countersObj = new JSONArray();
+ protected void populateCounters(ObjectNode jo) {
+ ObjectMapper om = new ObjectMapper();
+ ArrayNode countersObj = om.createArrayNode();
for (Map.Entry<String, Long> e : counters.entrySet()) {
- JSONObject jpe = new JSONObject();
+ ObjectNode jpe = om.createObjectNode();
jpe.put("name", e.getKey());
jpe.put("value", e.getValue());
- countersObj.put(jpe);
+ countersObj.add(jpe);
}
- jo.put("counters", countersObj);
+ jo.set("counters", countersObj);
}
protected void merge(AbstractProfile profile) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
index e8fec1b..79a5538 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
@@ -25,10 +25,9 @@
import java.util.Map;
import java.util.Map.Entry;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.job.JobId;
public class JobProfile extends AbstractProfile {
@@ -62,16 +61,17 @@
}
@Override
- public JSONObject toJSON() throws JSONException {
- JSONObject json = new JSONObject();
+ public ObjectNode toJSON() {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode json = om.createObjectNode();
json.put("job-id", jobId.toString());
populateCounters(json);
- JSONArray jobletsArray = new JSONArray();
+ ArrayNode jobletsArray = om.createArrayNode();
for (JobletProfile p : jobletProfiles.values()) {
- jobletsArray.put(p.toJSON());
+ jobletsArray.add(p.toJSON());
}
- json.put("joblets", jobletsArray);
+ json.set("joblets", jobletsArray);
return json;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
index 33d5a6c..c3792df 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
@@ -25,10 +25,10 @@
import java.util.Map;
import java.util.Map.Entry;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.dataflow.TaskAttemptId;
public class JobletProfile extends AbstractProfile {
@@ -62,16 +62,18 @@
}
@Override
- public JSONObject toJSON() throws JSONException {
- JSONObject json = new JSONObject();
+ public ObjectNode toJSON() {
+
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode json = om.createObjectNode();
json.put("node-id", nodeId.toString());
populateCounters(json);
- JSONArray tasks = new JSONArray();
+ ArrayNode tasks = om.createArrayNode();
for (TaskProfile p : taskProfiles.values()) {
- tasks.put(p.toJSON());
+ tasks.add(p.toJSON());
}
- json.put("tasks", tasks);
+ json.set("tasks", tasks);
return json;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java
index a77c56f..680d2f9 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java
@@ -25,10 +25,9 @@
import java.util.Map;
import java.util.Map.Entry;
-import org.json.JSONArray;
-import org.json.JSONException;
-import org.json.JSONObject;
-
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.dataflow.TaskAttemptId;
import org.apache.hyracks.api.partitions.PartitionId;
import org.apache.hyracks.control.common.job.profiling.counters.MultiResolutionEventProfiler;
@@ -64,39 +63,41 @@
}
@Override
- public JSONObject toJSON() throws JSONException {
- JSONObject json = new JSONObject();
+ public ObjectNode toJSON() {
+
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode json = om.createObjectNode();
json.put("activity-id", taskAttemptId.getTaskId().getActivityId().toString());
json.put("partition", taskAttemptId.getTaskId().getPartition());
json.put("attempt", taskAttemptId.getAttempt());
if (partitionSendProfile != null) {
- JSONArray pspArray = new JSONArray();
+ ArrayNode pspArray = om.createArrayNode();
for (PartitionProfile pp : partitionSendProfile.values()) {
- JSONObject ppObj = new JSONObject();
+ ObjectNode ppObj = om.createObjectNode();
PartitionId pid = pp.getPartitionId();
- JSONObject pidObj = new JSONObject();
- pidObj.put("job-id", pid.getJobId());
- pidObj.put("connector-id", pid.getConnectorDescriptorId());
+ ObjectNode pidObj = om.createObjectNode();
+ pidObj.put("job-id", pid.getJobId().toString());
+ pidObj.put("connector-id", pid.getConnectorDescriptorId().toString());
pidObj.put("sender-index", pid.getSenderIndex());
pidObj.put("receiver-index", pid.getReceiverIndex());
- ppObj.put("partition-id", pidObj);
+ ppObj.set("partition-id", pidObj);
ppObj.put("open-time", pp.getOpenTime());
ppObj.put("close-time", pp.getCloseTime());
MultiResolutionEventProfiler samples = pp.getSamples();
ppObj.put("offset", samples.getOffset());
int resolution = samples.getResolution();
int sampleCount = samples.getCount();
- JSONArray ftA = new JSONArray();
+ ArrayNode ftA = om.createArrayNode();
int[] ft = samples.getSamples();
for (int i = 0; i < sampleCount; ++i) {
- ftA.put(ft[i]);
+ ftA.add(ft[i]);
}
- ppObj.put("frame-times", ftA);
+ ppObj.set("frame-times", ftA);
ppObj.put("resolution", resolution);
- pspArray.put(ppObj);
+ pspArray.add(ppObj);
}
- json.put("partition-send-profile", pspArray);
+ json.set("partition-send-profile", pspArray);
}
populateCounters(json);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/logs/LogFile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/logs/LogFile.java
index 8f95c35..c0f88eb 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/logs/LogFile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/logs/LogFile.java
@@ -18,12 +18,12 @@
*/
package org.apache.hyracks.control.common.logs;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintWriter;
-import org.json.JSONObject;
-
public class LogFile {
private final File root;
@@ -38,12 +38,12 @@
openTime = System.currentTimeMillis();
}
- public synchronized void log(JSONObject object) throws Exception {
+ public synchronized void log(ObjectNode object) throws Exception {
if (out == null) {
root.mkdirs();
out = new PrintWriter(new FileOutputStream(new File(root, openTime + ".log"), true));
}
- out.println(object.toString(1));
+ out.println(object.asText());
out.flush();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/ThreadDumpHelper.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/ThreadDumpHelper.java
index eacb9e0..1d6dbcd 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/ThreadDumpHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/utils/ThreadDumpHelper.java
@@ -18,6 +18,13 @@
*/
package org.apache.hyracks.control.common.utils;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializationFeature;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+import java.io.IOException;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.ArrayList;
@@ -26,15 +33,12 @@
import java.util.List;
import java.util.Map;
-import org.json.JSONException;
-import org.json.JSONObject;
-
public class ThreadDumpHelper {
private ThreadDumpHelper() {
}
- public static String takeDumpJSON(ThreadMXBean threadMXBean) throws JSONException {
+ public static String takeDumpJSON(ThreadMXBean threadMXBean) throws IOException {
ThreadInfo[] threadInfos = threadMXBean.dumpAllThreads(true, true);
List<Map<String, Object>> threads = new ArrayList<>();
@@ -69,18 +73,20 @@
}
threads.add(threadMap);
}
- JSONObject json = new JSONObject();
- json.put("date", new Date());
- json.put("threads", threads);
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode json = om.createObjectNode();
+ json.put("date", new Date().toString());
+ json.putPOJO("threads", threads);
- long [] deadlockedThreads = threadMXBean.findDeadlockedThreads();
- long [] monitorDeadlockedThreads = threadMXBean.findMonitorDeadlockedThreads();
+ long[] deadlockedThreads = threadMXBean.findDeadlockedThreads();
+ long[] monitorDeadlockedThreads = threadMXBean.findMonitorDeadlockedThreads();
if (deadlockedThreads != null && deadlockedThreads.length > 0) {
- json.put("deadlocked_thread_ids", deadlockedThreads);
+ json.putPOJO("deadlocked_thread_ids", deadlockedThreads);
}
if (monitorDeadlockedThreads != null && monitorDeadlockedThreads.length > 0) {
- json.put("monitor_deadlocked_thread_ids", monitorDeadlockedThreads);
+ json.putPOJO("monitor_deadlocked_thread_ids", monitorDeadlockedThreads);
}
- return json.toString();
+ om.enable(SerializationFeature.INDENT_OUTPUT);
+ return om.writerWithDefaultPrettyPrinter().writeValueAsString(json);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
index e44feb7..0158068 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
@@ -48,11 +48,6 @@
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>20090211</version>
- </dependency>
- <dependency>
<groupId>org.apache.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
<version>${project.version}</version>
@@ -90,5 +85,9 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractConnectorDescriptor.java
index e780ea0..6b8b38f 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractConnectorDescriptor.java
@@ -18,14 +18,15 @@
*/
package org.apache.hyracks.dataflow.std.base;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.application.ICCApplicationContext;
import org.apache.hyracks.api.constraints.IConstraintAcceptor;
import org.apache.hyracks.api.dataflow.ConnectorDescriptorId;
import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
import org.apache.hyracks.api.job.ActivityCluster;
import org.apache.hyracks.api.job.IConnectorDescriptorRegistry;
-import org.json.JSONException;
-import org.json.JSONObject;
public abstract class AbstractConnectorDescriptor implements IConnectorDescriptor {
private static final long serialVersionUID = 1L;
@@ -54,8 +55,9 @@
}
@Override
- public JSONObject toJSON() throws JSONException {
- JSONObject jconn = new JSONObject();
+ public JsonNode toJSON() {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode jconn = om.createObjectNode();
jconn.put("id", String.valueOf(getConnectorId()));
jconn.put("java-class", getClass().getName());
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
index 4f22a17..a18328e 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
@@ -18,14 +18,14 @@
*/
package org.apache.hyracks.dataflow.std.base;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.hyracks.api.application.ICCApplicationContext;
import org.apache.hyracks.api.constraints.IConstraintAcceptor;
import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
-import org.json.JSONException;
-import org.json.JSONObject;
public abstract class AbstractOperatorDescriptor implements IOperatorDescriptor {
private static final long serialVersionUID = 1L;
@@ -91,8 +91,9 @@
}
@Override
- public JSONObject toJSON() throws JSONException {
- JSONObject jop = new JSONObject();
+ public ObjectNode toJSON() {
+ ObjectMapper om = new ObjectMapper();
+ ObjectNode jop = om.createObjectNode();
jop.put("id", String.valueOf(getOperatorId()));
jop.put("java-class", getClass().getName());
jop.put("in-arity", getInputArity());
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
index 1a5d4ab..d934e8c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
@@ -137,11 +137,6 @@
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>20090211</version>
- </dependency>
- <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
@@ -152,5 +147,9 @@
<artifactId>hyracks-storage-common</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
index fe9c1ea..b51a578 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -137,7 +137,7 @@
protected JobId executeTest(JobSpecification spec) throws Exception {
if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(spec.toJSON().toString(2));
+ LOGGER.info(spec.toJSON().asText());
}
JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
if (LOGGER.isLoggable(Level.INFO)) {
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
index 890ab0a..4163e46 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
@@ -26,6 +26,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.io.FileUtils;
import org.apache.hyracks.api.client.HyracksConnection;
import org.apache.hyracks.api.client.IHyracksClientConnection;
@@ -45,7 +46,7 @@
import org.apache.hyracks.control.nc.resources.memory.FrameManager;
import org.apache.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
import org.apache.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
-import org.json.JSONArray;
+import com.fasterxml.jackson.databind.node.ArrayNode;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
@@ -123,7 +124,7 @@
protected void runTest(JobSpecification spec) throws Exception {
if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(spec.toJSON().toString(2));
+ LOGGER.info(spec.toJSON().toString());
}
JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
if (LOGGER.isLoggable(Level.INFO)) {
@@ -141,7 +142,8 @@
IHyracksDataset hyracksDataset = new HyracksDataset(hcc, spec.getFrameSize(), nReaders);
IHyracksDatasetReader reader = hyracksDataset.createReader(jobId, spec.getResultSetIds().get(0));
- JSONArray resultRecords = new JSONArray();
+ ObjectMapper om = new ObjectMapper();
+ ArrayNode resultRecords = om.createArrayNode();
ByteBufferInputStream bbis = new ByteBufferInputStream();
int readSize = reader.read(resultFrame);
@@ -156,7 +158,7 @@
bbis.setByteBuffer(resultFrame.getBuffer(), start);
byte[] recordBytes = new byte[length];
bbis.read(recordBytes, 0, length);
- resultRecords.put(new String(recordBytes, 0, length));
+ resultRecords.add(new String(recordBytes, 0, length));
}
} finally {
try {
diff --git a/hyracks-fullstack/hyracks/hyracks-server/pom.xml b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
index 70e47fe..3667951 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
@@ -164,11 +164,6 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>20090211</version>
- </dependency>
- <dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.4.5</version>
@@ -183,5 +178,9 @@
<artifactId>hyracks-control-nc</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java b/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
index 8d1246b..2185826 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
+++ b/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
@@ -23,6 +23,8 @@
import java.net.InetAddress;
import java.util.logging.Logger;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
import junit.framework.Assert;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpResponse;
@@ -32,8 +34,8 @@
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.apache.hyracks.server.process.HyracksVirtualCluster;
-import org.json.JSONArray;
-import org.json.JSONObject;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -111,15 +113,17 @@
@Test
public void IsNodelistCorrect() throws Exception {
// Ping the nodelist HTTP API
+
+ ObjectMapper om = new ObjectMapper();
String localhost = InetAddress.getLoopbackAddress().getHostAddress();
String response = getHttp("http://" + localhost + ":12345/rest/nodes");
- JSONObject result = new JSONObject(response);
- JSONArray nodes = result.getJSONArray("result");
- int numNodes = nodes.length();
+ JsonNode result = om.readTree(response);
+ JsonNode nodes = result.get("result");
+ int numNodes = nodes.size();
Assert.assertEquals("Wrong number of nodes!", 2, numNodes);
- for (int i = 0; i < nodes.length(); i++) {
- JSONObject node = nodes.getJSONObject(i);
- String id = node.getString("node-id");
+ for (int i = 0; i < nodes.size(); i++) {
+ JsonNode node = nodes.get(i);
+ String id = node.get("node-id").asText();
if (id.equals("red") || id.equals("blue")) {
continue;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
index 5aebb22..724df9e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/pom.xml
@@ -46,10 +46,5 @@
<artifactId>hyracks-api</artifactId>
<version>${project.version}</version>
</dependency>
- <dependency>
- <groupId>org.apache.hyracks</groupId>
- <artifactId>hyracks-api</artifactId>
- <version>${project.version}</version>
- </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index 9cd9841..7390905 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -108,6 +108,16 @@
<artifactId>commons-io</artifactId>
<version>${commons.io.version}</version>
</dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ <version>2.8.4</version>
+ </dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-core</artifactId>
+ <version>2.8.4</version>
+ </dependency>
</dependencies>
</dependencyManagement>