HTTP API++: Threaddumps / CC / INI, Misc
- Thread dumps are returned in JSON format.
e.g. http://localhost:19002/admin/cluster/node/asterix_nc2/threaddump
- Add CC config, stats, and thread dump endpoints
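  e.g. http://localhost:19002/admin/cluster/cc/threaddump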
- Add CC / NC ini settings under config endpoints
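  e.g. http://localhost:19002/admin/cluster/cc/config, http://localhost:19002/admin/cluster/node/asterix_nc1/config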
- Wrap doGet implementations for cluster APIs to fix SonarQube issues
- Avoid boxing arrays & lists for JSONObject.put()
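The cluster state response (/admin/cluster) now links to the new CC endpoints, e.g. (from the updated cluster_state_1 test result):
  "cc": {
    "configUri": "http://127.0.0.1:19002/admin/cluster/cc/config",
    "statsUri": "http://127.0.0.1:19002/admin/cluster/cc/stats",
    "threadDumpUri": "http://127.0.0.1:19002/admin/cluster/cc/threaddump"
  }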
Change-Id: I536606a1fbc394c6c70bb8ce14791cc411691617
Reviewed-on: https://asterix-gerrit.ics.uci.edu/1159
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Michael Blow <mblow@apache.org>
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
index f35b844..7a9f654 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -219,7 +219,7 @@
*/
public static void main(String[] args) {
AsterixHyracksIntegrationUtil integrationUtil = new AsterixHyracksIntegrationUtil();
- run(integrationUtil, false, false);
+ run(integrationUtil, Boolean.getBoolean("cleanup.start"), Boolean.getBoolean("cleanup.shutdown"));
}
protected static void run(final AsterixHyracksIntegrationUtil integrationUtil, boolean cleanupOnStart,
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ClusterAPIServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ClusterAPIServlet.java
index 8cedabc..86b6602 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ClusterAPIServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ClusterAPIServlet.java
@@ -23,6 +23,8 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
@@ -37,16 +39,30 @@
public class ClusterAPIServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
+ private static final Logger LOGGER = Logger.getLogger(ClusterAPIServlet.class.getName());
+
+ public static final String NODE_ID_KEY = "node_id";
+ public static final String CONFIG_URI_KEY = "configUri";
+ public static final String STATS_URI_KEY = "statsUri";
+ public static final String THREAD_DUMP_URI_KEY = "threadDumpUri";
@Override
- public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+ public final void doGet(HttpServletRequest request, HttpServletResponse response) {
+ try {
+ getUnsafe(request, response);
+ } catch (IOException e) {
+ LOGGER.log(Level.WARNING, "Unhandled IOException thrown from " + getClass().getName() + " get impl", e);
+ }
+ }
+
+ protected void getUnsafe(HttpServletRequest request, HttpServletResponse response) throws IOException {
response.setContentType("application/json");
response.setCharacterEncoding("utf-8");
PrintWriter responseWriter = response.getWriter();
JSONObject json;
try {
- json = getClusterStateJSON(request, "node/");
+ json = getClusterStateJSON(request, "");
response.setStatus(HttpServletResponse.SC_OK);
responseWriter.write(json.toString(4));
} catch (IllegalArgumentException e) {
@@ -84,12 +100,29 @@
requestURL.append('/');
}
requestURL.append(pathToNode);
- String nodeURL = requestURL.toString().replaceAll("/[^./]+/\\.\\./", "/");
+ String clusterURL = "";
+ String newClusterURL = requestURL.toString();
+ while (!clusterURL.equals(newClusterURL)) {
+ clusterURL = newClusterURL;
+ newClusterURL = clusterURL.replaceAll("/[^./]+/\\.\\./", "/");
+ }
+ String nodeURL = clusterURL + "node/";
for (int i = 0; i < ncs.length(); i++) {
JSONObject nc = ncs.getJSONObject(i);
- nc.put("configUri", nodeURL + nc.getString("node_id") + "/config");
- nc.put("statsUri", nodeURL + nc.getString("node_id") + "/stats");
+ nc.put(CONFIG_URI_KEY, nodeURL + nc.getString(NODE_ID_KEY) + "/config");
+ nc.put(STATS_URI_KEY, nodeURL + nc.getString(NODE_ID_KEY) + "/stats");
+ nc.put(THREAD_DUMP_URI_KEY, nodeURL + nc.getString(NODE_ID_KEY) + "/threaddump");
}
+ JSONObject cc;
+ if (json.has("cc")) {
+ cc = json.getJSONObject("cc");
+ } else {
+ cc = new JSONObject();
+ json.put("cc", cc);
+ }
+ cc.put(CONFIG_URI_KEY, clusterURL + "cc/config");
+ cc.put(STATS_URI_KEY, clusterURL + "cc/stats");
+ cc.put(THREAD_DUMP_URI_KEY, clusterURL + "cc/threaddump");
return json;
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ClusterCCDetailsAPIServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ClusterCCDetailsAPIServlet.java
new file mode 100644
index 0000000..64e65a5
--- /dev/null
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ClusterCCDetailsAPIServlet.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.api.http.servlet;
+
+import static org.apache.asterix.api.http.servlet.ServletConstants.HYRACKS_CONNECTION_ATTR;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.asterix.app.result.ResultUtil;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.json.JSONObject;
+
+public class ClusterCCDetailsAPIServlet extends ClusterAPIServlet {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ protected void getUnsafe(HttpServletRequest request, HttpServletResponse response) throws IOException {
+ PrintWriter responseWriter = response.getWriter();
+ ServletContext context = getServletContext();
+ IHyracksClientConnection hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
+ JSONObject json;
+
+ try {
+ if (request.getPathInfo() == null) {
+ json = getClusterStateJSON(request, "../").getJSONObject("cc");
+ } else {
+ json = processNode(request, hcc);
+ }
+ response.setStatus(HttpServletResponse.SC_OK);
+ response.setContentType("application/json");
+ response.setCharacterEncoding("utf-8");
+ responseWriter.write(json.toString(4));
+ } catch (IllegalArgumentException e) { //NOSONAR - exception not logged or rethrown
+ response.sendError(HttpServletResponse.SC_NOT_FOUND);
+ } catch (Exception e) {
+ ResultUtil.apiErrorHandler(responseWriter, e);
+ response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+ }
+ responseWriter.flush();
+ }
+
+ private JSONObject processNode(HttpServletRequest request, IHyracksClientConnection hcc)
+ throws Exception {
+ String pathInfo = request.getPathInfo();
+ if (pathInfo.endsWith("/")) {
+ throw new IllegalArgumentException();
+ }
+ String[] parts = pathInfo.substring(1).split("/");
+
+ if (request.getPathInfo() == null) {
+ return getClusterStateJSON(request, "../../").getJSONObject("cc");
+ } else if (parts.length == 1) {
+ switch (parts[0]) {
+ case "config":
+ return new JSONObject(hcc.getNodeDetailsJSON(null, false, true));
+ case "stats":
+ return new JSONObject(hcc.getNodeDetailsJSON(null, true, false));
+ case "threaddump":
+ return processCCThreadDump(hcc);
+
+ default:
+ throw new IllegalArgumentException();
+ }
+
+ } else {
+ throw new IllegalArgumentException();
+ }
+ }
+
+ private JSONObject processCCThreadDump(IHyracksClientConnection hcc) throws Exception {
+ String dump = hcc.getThreadDump(null);
+ if (dump == null) {
+ throw new IllegalArgumentException();
+ }
+ return new JSONObject(dump);
+ }
+
+}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ClusterNodeDetailsAPIServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ClusterNodeDetailsAPIServlet.java
index 9cccdad..5a91fee 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ClusterNodeDetailsAPIServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ClusterNodeDetailsAPIServlet.java
@@ -18,6 +18,8 @@
*/
package org.apache.asterix.api.http.servlet;
+import static org.apache.asterix.api.http.servlet.ServletConstants.HYRACKS_CONNECTION_ATTR;
+
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
@@ -34,13 +36,11 @@
import org.json.JSONException;
import org.json.JSONObject;
-import static org.apache.asterix.api.http.servlet.ServletConstants.HYRACKS_CONNECTION_ATTR;
-
public class ClusterNodeDetailsAPIServlet extends ClusterAPIServlet {
private static final long serialVersionUID = 1L;
@Override
- public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
+ protected void getUnsafe(HttpServletRequest request, HttpServletResponse response) throws IOException {
PrintWriter responseWriter = response.getWriter();
ServletContext context = getServletContext();
IHyracksClientConnection hcc = (IHyracksClientConnection) context.getAttribute(HYRACKS_CONNECTION_ATTR);
@@ -49,7 +49,7 @@
try {
if (request.getPathInfo() == null) {
json = new JSONObject();
- json.put("ncs", getClusterStateJSON(request, "").getJSONArray("ncs"));
+ json.put("ncs", getClusterStateJSON(request, "../").getJSONArray("ncs"));
} else {
json = processNode(request, hcc);
}
@@ -68,11 +68,15 @@
private JSONObject processNode(HttpServletRequest request, IHyracksClientConnection hcc)
throws Exception {
- String[] parts = request.getPathInfo().substring(1).replaceAll("/+", "/").split("/");
+ String pathInfo = request.getPathInfo();
+ if (pathInfo.endsWith("/")) {
+ throw new IllegalArgumentException();
+ }
+ String[] parts = pathInfo.substring(1).split("/");
final String node = parts[0];
if (parts.length == 1) {
- JSONArray ncs = getClusterStateJSON(request, "../").getJSONArray("ncs");
+ JSONArray ncs = getClusterStateJSON(request, "../../").getJSONArray("ncs");
for (int i = 0; i < ncs.length(); i++) {
JSONObject json = ncs.getJSONObject(i);
if (node.equals(json.getString("node_id"))) {
@@ -90,9 +94,14 @@
case "config":
json = processNodeConfig(hcc, node);
break;
+
case "stats":
json = processNodeStats(hcc, node);
break;
+
+ case "threaddump":
+ return processNodeThreadDump(hcc, node);
+
default:
throw new IllegalArgumentException();
}
@@ -119,10 +128,6 @@
}
private JSONObject processNodeStats(IHyracksClientConnection hcc, String node) throws Exception {
- if ("cc".equals(node)) {
- return new JSONObject();
- }
-
final String details = hcc.getNodeDetailsJSON(node, true, false);
if (details == null) {
throw new IllegalArgumentException();
@@ -136,28 +141,55 @@
keys.add((String) iter.next());
}
- int gcNames = json.getJSONArray("gc-names").length();
+ final JSONArray gcNames = json.getJSONArray("gc-names");
+ final JSONArray gcCollectionTimes = json.getJSONArray("gc-collection-times");
+ final JSONArray gcCollectionCounts = json.getJSONArray("gc-collection-counts");
+
for (String key : keys) {
- if (key.startsWith("gc-collection-")) {
- final JSONArray gcArray = json.getJSONArray(key);
- for (int i = 0; i < gcNames; i++) {
- gcArray.put(i, gcArray.getJSONArray(i).get(index));
+ if (key.startsWith("gc-")) {
+ json.remove(key);
+ } else if (json.get(key) instanceof JSONArray) {
+ final JSONArray valueArray = json.getJSONArray(key);
+ // fixup an index of -1 to the final element in the array (i.e. RRD_SIZE)
+ if (index == -1) {
+ index = valueArray.length() - 1;
}
- } else if (!"node-id".equals(key) && !"gc-names".equals(key)) {
- json.put(key, json.getJSONArray(key).get(index));
+ final Object value = valueArray.get(index);
+ json.remove(key);
+ json.put(key.replaceAll("s$",""), value);
}
}
+ List<JSONObject> gcs = new ArrayList<>();
+
+ for (int i = 0; i < gcNames.length(); i++) {
+ JSONObject gc = new JSONObject();
+ gc.put("name", gcNames.get(i));
+ gc.put("collection-time", ((JSONArray)gcCollectionTimes.get(i)).get(index));
+ gc.put("collection-count", ((JSONArray)gcCollectionCounts.get(i)).get(index));
+ gcs.add(gc);
+ }
+ json.put("gcs", gcs);
+
return json;
}
private JSONObject processNodeConfig(IHyracksClientConnection hcc, String node) throws Exception {
- if ("cc".equals(node)) {
- return new JSONObject();
- }
String config = hcc.getNodeDetailsJSON(node, false, true);
if (config == null) {
throw new IllegalArgumentException();
}
return new JSONObject(config);
}
+
+ private JSONObject processNodeThreadDump(IHyracksClientConnection hcc, String node) throws Exception {
+ if ("cc".equals(node)) {
+ return new JSONObject();
+ }
+ String dump = hcc.getThreadDump(node);
+ if (dump == null) {
+ throw new IllegalArgumentException();
+ }
+ return new JSONObject(dump);
+ }
+
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
index 9883960..bd910a5 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
@@ -18,6 +18,9 @@
*/
package org.apache.asterix.hyracks.bootstrap;
+import static org.apache.asterix.api.http.servlet.ServletConstants.ASTERIX_BUILD_PROP_ATTR;
+import static org.apache.asterix.api.http.servlet.ServletConstants.HYRACKS_CONNECTION_ATTR;
+
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
@@ -29,6 +32,7 @@
import org.apache.asterix.api.http.servlet.APIServlet;
import org.apache.asterix.api.http.servlet.AQLAPIServlet;
import org.apache.asterix.api.http.servlet.ClusterAPIServlet;
+import org.apache.asterix.api.http.servlet.ClusterCCDetailsAPIServlet;
import org.apache.asterix.api.http.servlet.ClusterNodeDetailsAPIServlet;
import org.apache.asterix.api.http.servlet.ConnectorAPIServlet;
import org.apache.asterix.api.http.servlet.DDLAPIServlet;
@@ -72,9 +76,6 @@
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.servlet.ServletMapping;
-import static org.apache.asterix.api.http.servlet.ServletConstants.ASTERIX_BUILD_PROP_ATTR;
-import static org.apache.asterix.api.http.servlet.ServletConstants.HYRACKS_CONNECTION_ATTR;
-
public class CCApplicationEntryPoint implements ICCApplicationEntryPoint {
private static final Logger LOGGER = Logger.getLogger(CCApplicationEntryPoint.class.getName());
@@ -219,6 +220,7 @@
addServlet(context, Servlets.VERSION);
addServlet(context, Servlets.CLUSTER_STATE);
addServlet(context, Servlets.CLUSTER_STATE_NODE_DETAIL);
+ addServlet(context, Servlets.CLUSTER_STATE_CC_DETAIL);
return jsonAPIServer;
}
@@ -294,6 +296,8 @@
return new ClusterAPIServlet();
case CLUSTER_STATE_NODE_DETAIL:
return new ClusterNodeDetailsAPIServlet();
+ case CLUSTER_STATE_CC_DETAIL:
+ return new ClusterCCDetailsAPIServlet();
default:
throw new IllegalStateException(String.valueOf(key));
}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/APIQueries.xml b/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/APIQueries.xml
new file mode 100644
index 0000000..753554c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/APIQueries.xml
@@ -0,0 +1,46 @@
+<!--
+ ! Licensed to the Apache Software Foundation (ASF) under one
+ ! or more contributor license agreements. See the NOTICE file
+ ! distributed with this work for additional information
+ ! regarding copyright ownership. The ASF licenses this file
+ ! to you under the Apache License, Version 2.0 (the
+ ! "License"); you may not use this file except in compliance
+ ! with the License. You may obtain a copy of the License at
+ !
+ ! http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing,
+ ! software distributed under the License is distributed on an
+ ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ! KIND, either express or implied. See the License for the
+ ! specific language governing permissions and limitations
+ ! under the License.
+ !-->
+<test-group name="api">
+ <test-case FilePath="api">
+ <compilation-unit name="cluster_state_1">
+ <output-dir compare="Text">cluster_state_1</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="api">
+ <compilation-unit name="cluster_state_2">
+ <output-dir compare="Text">cluster_state_2</output-dir>
+ <expected-error>HTTP/1.1 404 Not Found</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="api">
+ <compilation-unit name="cluster_state_3">
+ <output-dir compare="Text">cluster_state_3</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="api">
+ <compilation-unit name="cluster_state_4">
+ <output-dir compare="Text">cluster_state_4</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="api">
+ <compilation-unit name="cluster_state_cc_1">
+ <output-dir compare="Text">cluster_state_cc_1</output-dir>
+ </compilation-unit>
+ </test-case>
+</test-group>
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/cluster_state_cc_1/cluster_state_cc_1.1.cstate.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/cluster_state_cc_1/cluster_state_cc_1.1.cstate.aql
new file mode 100644
index 0000000..726f7e7
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/cluster_state_cc_1/cluster_state_cc_1.1.cstate.aql
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Test case Name : cluster_state_cc_1
+ * Description : cluster state api cc details
+ * Expected Result : Positive
+ * Date : 8th September 2016
+ */
+/cc
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/comparison/ComparisonQueries.xml b/asterixdb/asterix-app/src/test/resources/runtimets/queries/comparison/ComparisonQueries.xml
index 7e8eaca9..8493df2 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/comparison/ComparisonQueries.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/comparison/ComparisonQueries.xml
@@ -1,209 +1,229 @@
- <test-case FilePath="comparison">
- <compilation-unit name="year_month_duration_order">
- <output-dir compare="Text">year_month_duration_order</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="datetime_order">
- <output-dir compare="Text">datetime_order</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="datetime_range">
- <output-dir compare="Text">datetime_range</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="datetime_tzeq">
- <output-dir compare="Text">datetime_tzeq</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="interval_order">
- <output-dir compare="Text">interval_order</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="time_order">
- <output-dir compare="Text">time_order</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="date_order">
- <output-dir compare="Text">date_order</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="double">
- <output-dir compare="Text">double</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="double_gte_01">
- <output-dir compare="Text">double_gte_01</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="double_null">
- <output-dir compare="Text">double_null</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="eq_01">
- <output-dir compare="Text">eq_01</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="float">
- <output-dir compare="Text">float</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="float_null">
- <output-dir compare="Text">float_null</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="gt_01">
- <output-dir compare="Text">gt_01</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="gte_01">
- <output-dir compare="Text">gte_01</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="int16">
- <output-dir compare="Text">int16</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="int16_null">
- <output-dir compare="Text">int16_null</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="int32">
- <output-dir compare="Text">int32</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="int32_null">
- <output-dir compare="Text">int32_null</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="int64">
- <output-dir compare="Text">int64</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="int64_null">
- <output-dir compare="Text">int64_null</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="int8">
- <output-dir compare="Text">int8</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="int8_null">
- <output-dir compare="Text">int8_null</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="lt_01">
- <output-dir compare="Text">lt_01</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="lte_01">
- <output-dir compare="Text">lte_01</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="neq_01">
- <output-dir compare="Text">neq_01</output-dir>
- </compilation-unit>
- </test-case>
- <!--
- <test-case FilePath="comparison">
- <compilation-unit name="numeric-comparison_01">
- <output-dir compare="Text">numeric-comparison_01</output-dir>
- </compilation-unit>
- </test-case>
- -->
- <test-case FilePath="comparison">
- <compilation-unit name="string">
- <output-dir compare="Text">string</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="string_null">
- <output-dir compare="Text">string_null</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="issue363_equality">
- <output-dir compare="Text">issue363_equality</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="issue363_inequality_duration">
- <output-dir compare="Text">issue363_inequality_duration</output-dir>
- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the DURATION type are not defined</expected-error>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="issue363_inequality_interval">
- <output-dir compare="Text">issue363_inequality_interval</output-dir>
- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the INTERVAL type are not defined</expected-error>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="issue363_inequality_point">
- <output-dir compare="Text">issue363_inequality_point</output-dir>
- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the POINT type are not defined</expected-error>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="issue363_inequality_line">
- <output-dir compare="Text">issue363_inequality_line</output-dir>
- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the LINE type are not defined</expected-error>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="issue363_inequality_polygon">
- <output-dir compare="Text">issue363_inequality_polygon</output-dir>
- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the POLYGON type are not defined</expected-error>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="issue363_inequality_rectangle">
- <output-dir compare="Text">issue363_inequality_rectangle</output-dir>
- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the RECTANGLE type are not defined</expected-error>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="issue363_inequality_circle">
- <output-dir compare="Text">issue363_inequality_circle</output-dir>
- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the CIRCLE type are not defined</expected-error>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="binary">
- <output-dir compare="Text">binary</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="binary_null">
- <output-dir compare="Text">binary_null</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison">
- <compilation-unit name="uuid_1">
- <output-dir compare="Text">uuid_1</output-dir>
- </compilation-unit>
- </test-case>
+<!--
+ ! Licensed to the Apache Software Foundation (ASF) under one
+ ! or more contributor license agreements. See the NOTICE file
+ ! distributed with this work for additional information
+ ! regarding copyright ownership. The ASF licenses this file
+ ! to you under the Apache License, Version 2.0 (the
+ ! "License"); you may not use this file except in compliance
+ ! with the License. You may obtain a copy of the License at
+ !
+ ! http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing,
+ ! software distributed under the License is distributed on an
+ ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ! KIND, either express or implied. See the License for the
+ ! specific language governing permissions and limitations
+ ! under the License.
+ !-->
+<test-group name="comparison">
+ <test-case FilePath="comparison">
+ <compilation-unit name="year_month_duration_order">
+ <output-dir compare="Text">year_month_duration_order</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="datetime_order">
+ <output-dir compare="Text">datetime_order</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="datetime_range">
+ <output-dir compare="Text">datetime_range</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="datetime_tzeq">
+ <output-dir compare="Text">datetime_tzeq</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="interval_order">
+ <output-dir compare="Text">interval_order</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="time_order">
+ <output-dir compare="Text">time_order</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="date_order">
+ <output-dir compare="Text">date_order</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="double">
+ <output-dir compare="Text">double</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="double_gte_01">
+ <output-dir compare="Text">double_gte_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="double_null">
+ <output-dir compare="Text">double_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="eq_01">
+ <output-dir compare="Text">eq_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="float">
+ <output-dir compare="Text">float</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="float_null">
+ <output-dir compare="Text">float_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="gt_01">
+ <output-dir compare="Text">gt_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="gte_01">
+ <output-dir compare="Text">gte_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="int16">
+ <output-dir compare="Text">int16</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="int16_null">
+ <output-dir compare="Text">int16_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="int32">
+ <output-dir compare="Text">int32</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="int32_null">
+ <output-dir compare="Text">int32_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="int64">
+ <output-dir compare="Text">int64</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="int64_null">
+ <output-dir compare="Text">int64_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="int8">
+ <output-dir compare="Text">int8</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="int8_null">
+ <output-dir compare="Text">int8_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="lt_01">
+ <output-dir compare="Text">lt_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="lte_01">
+ <output-dir compare="Text">lte_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="neq_01">
+ <output-dir compare="Text">neq_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <!--
+ <test-case FilePath="comparison">
+ <compilation-unit name="numeric-comparison_01">
+ <output-dir compare="Text">numeric-comparison_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ -->
+ <test-case FilePath="comparison">
+ <compilation-unit name="string">
+ <output-dir compare="Text">string</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="string_null">
+ <output-dir compare="Text">string_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="issue363_equality">
+ <output-dir compare="Text">issue363_equality</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="issue363_inequality_duration">
+ <output-dir compare="Text">issue363_inequality_duration</output-dir>
+ <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the DURATION type are not defined</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="issue363_inequality_interval">
+ <output-dir compare="Text">issue363_inequality_interval</output-dir>
+ <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the INTERVAL type are not defined</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="issue363_inequality_point">
+ <output-dir compare="Text">issue363_inequality_point</output-dir>
+ <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the POINT type are not defined</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="issue363_inequality_line">
+ <output-dir compare="Text">issue363_inequality_line</output-dir>
+ <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the LINE type are not defined</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="issue363_inequality_polygon">
+ <output-dir compare="Text">issue363_inequality_polygon</output-dir>
+ <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the POLYGON type are not defined</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="issue363_inequality_rectangle">
+ <output-dir compare="Text">issue363_inequality_rectangle</output-dir>
+ <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the RECTANGLE type are not defined</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="issue363_inequality_circle">
+ <output-dir compare="Text">issue363_inequality_circle</output-dir>
+ <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the CIRCLE type are not defined</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="binary">
+ <output-dir compare="Text">binary</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="binary_null">
+ <output-dir compare="Text">binary_null</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison">
+ <compilation-unit name="uuid_1">
+ <output-dir compare="Text">uuid_1</output-dir>
+ </compilation-unit>
+ </test-case>
+</test-group>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/comparison/deep_equal/DeepEqualQueries.xml b/asterixdb/asterix-app/src/test/resources/runtimets/queries/comparison/deep_equal/DeepEqualQueries.xml
index badeff0..fab2e85 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/comparison/deep_equal/DeepEqualQueries.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/comparison/deep_equal/DeepEqualQueries.xml
@@ -16,13 +16,15 @@
! specific language governing permissions and limitations
! under the License.
!-->
- <test-case FilePath="comparison/deep_equal">
- <compilation-unit name="documentation-example">
- <output-dir compare="Text">documentation-example</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="comparison/deep_equal">
- <compilation-unit name="open-records-example">
- <output-dir compare="Text">open-records-example</output-dir>
- </compilation-unit>
- </test-case>
\ No newline at end of file
+<test-group name="deep-equal">
+ <test-case FilePath="comparison/deep_equal">
+ <compilation-unit name="documentation-example">
+ <output-dir compare="Text">documentation-example</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="comparison/deep_equal">
+ <compilation-unit name="open-records-example">
+ <output-dir compare="Text">open-records-example</output-dir>
+ </compilation-unit>
+ </test-case>
+</test-group>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/records/RecordsQueries.xml b/asterixdb/asterix-app/src/test/resources/runtimets/queries/records/RecordsQueries.xml
index 0f2f757..bfe8fa7 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/records/RecordsQueries.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/records/RecordsQueries.xml
@@ -16,162 +16,164 @@
! specific language governing permissions and limitations
! under the License.
!-->
- <test-case FilePath="records">
- <compilation-unit name="access-nested-fields">
- <output-dir compare="Text">access-nested-fields</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records">
- <compilation-unit name="closed-record-constructor_01">
- <output-dir compare="Text">closed-record-constructor_01</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records">
- <compilation-unit name="closed-record-constructor_02">
- <output-dir compare="Text">closed-record-constructor_02</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records">
- <compilation-unit name="closed-record-constructor_03">
- <output-dir compare="Text">closed-record-constructor_03</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records">
- <compilation-unit name="expFieldName">
- <output-dir compare="Text">expFieldName</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records">
- <compilation-unit name="field-access-by-index_01">
- <output-dir compare="Text">field-access-by-index_01</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records">
- <compilation-unit name="field-access-on-open-field">
- <output-dir compare="Text">field-access-on-open-field</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/get-record-fields">
- <compilation-unit name="documentation-example">
- <output-dir compare="Text">documentation-example</output-dir>
- </compilation-unit>
- </test-case>
- <!--test-case FilePath="records/get-record-fields">
- <compilation-unit name="tiny-social-example">
- <output-dir compare="Text">tiny-social-example</output-dir>
- </compilation-unit>
- </test-case!-->
- <test-case FilePath="records/get-record-fields">
- <compilation-unit name="tiny-social-example-no-complex-types">
- <output-dir compare="Text">tiny-social-example-no-complex-types</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/get-record-fields">
- <compilation-unit name="tiny-social-example-only-lists">
- <output-dir compare="Text">tiny-social-example-only-lists</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/get-record-fields">
- <compilation-unit name="tiny-social-example-only-records">
- <output-dir compare="Text">tiny-social-example-only-records</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/get-record-field-value">
- <compilation-unit name="documentation-example">
- <output-dir compare="Text">documentation-example</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/get-record-field-value">
- <compilation-unit name="highly-nested-closed">
- <output-dir compare="Text">highly-nested-closed</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/get-record-field-value">
- <compilation-unit name="highly-nested-mixed">
- <output-dir compare="Text">highly-nested-mixed</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/get-record-field-value">
- <compilation-unit name="highly-nested-open">
- <output-dir compare="Text">highly-nested-open</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/get-record-field-value">
+<test-group name="records">
+ <test-case FilePath="records">
+ <compilation-unit name="access-nested-fields">
+ <output-dir compare="Text">access-nested-fields</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="closed-record-constructor_01">
+ <output-dir compare="Text">closed-record-constructor_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="closed-record-constructor_02">
+ <output-dir compare="Text">closed-record-constructor_02</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="closed-record-constructor_03">
+ <output-dir compare="Text">closed-record-constructor_03</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="expFieldName">
+ <output-dir compare="Text">expFieldName</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="field-access-by-index_01">
+ <output-dir compare="Text">field-access-by-index_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="field-access-on-open-field">
+ <output-dir compare="Text">field-access-on-open-field</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/get-record-fields">
+ <compilation-unit name="documentation-example">
+ <output-dir compare="Text">documentation-example</output-dir>
+ </compilation-unit>
+ </test-case>
+ <!--test-case FilePath="records/get-record-fields">
<compilation-unit name="tiny-social-example">
- <output-dir compare="Text">tiny-social-example</output-dir>
+ <output-dir compare="Text">tiny-social-example</output-dir>
</compilation-unit>
- </test-case>
- <test-case FilePath="records">
- <compilation-unit name="open-record-constructor_01">
- <output-dir compare="Text">open-record-constructor_01</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records">
- <compilation-unit name="open-record-constructor_02">
- <output-dir compare="Text">open-record-constructor_02</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records">
- <compilation-unit name="closed-closed-fieldname-conflict_issue173">
- <output-dir compare="Text">closed-closed-fieldname-conflict_issue173</output-dir>
- <expected-error>java.lang.IllegalStateException: Closed fields 0 and 1 have the same field name "name"</expected-error>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records">
- <compilation-unit name="open-closed-fieldname-conflict_issue173">
- <output-dir compare="Text">open-closed-fieldname-conflict_issue173</output-dir>
- <expected-error>org.apache.hyracks.api.exceptions.HyracksDataException: Open field "name" has the same field name as closed field at index 0</expected-error>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records">
- <compilation-unit name="open-open-fieldname-conflict_issue173">
- <output-dir compare="Text">open-open-fieldname-conflict_issue173</output-dir>
- <expected-error>org.apache.hyracks.api.exceptions.HyracksDataException: Open fields 0 and 1 have the same field name "name"</expected-error>
- </compilation-unit>
- </test-case>
- <!-- RECORD MANIPULATION TESTS -->
- <test-case FilePath="records/record-merge">
- <compilation-unit name="documentation-example">
- <output-dir compare="Text">documentation-example</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/record-merge">
- <compilation-unit name="tiny-social-example-only-records">
- <output-dir compare="Text">tiny-social-example-only-records</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/record-merge">
- <compilation-unit name="highly-nested-open">
- <output-dir compare="Text">highly-nested-open</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/record-add-fields">
- <compilation-unit name="documentation-example">
- <output-dir compare="Text">documentation-example</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/record-add-fields">
- <compilation-unit name="tiny-social-example-only-records">
- <output-dir compare="Text">tiny-social-example-only-records</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/record-add-fields">
- <compilation-unit name="highly-nested-open">
- <output-dir compare="Text">highly-nested-open</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/record-remove-fields">
- <compilation-unit name="documentation-example">
- <output-dir compare="Text">documentation-example</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/record-remove-fields">
- <compilation-unit name="tiny-social-example-only-records">
- <output-dir compare="Text">tiny-social-example-only-records</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="records/record-remove-fields">
- <compilation-unit name="highly-nested-open">
- <output-dir compare="Text">highly-nested-open</output-dir>
- </compilation-unit>
- </test-case>
+ </test-case!-->
+ <test-case FilePath="records/get-record-fields">
+ <compilation-unit name="tiny-social-example-no-complex-types">
+ <output-dir compare="Text">tiny-social-example-no-complex-types</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/get-record-fields">
+ <compilation-unit name="tiny-social-example-only-lists">
+ <output-dir compare="Text">tiny-social-example-only-lists</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/get-record-fields">
+ <compilation-unit name="tiny-social-example-only-records">
+ <output-dir compare="Text">tiny-social-example-only-records</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/get-record-field-value">
+ <compilation-unit name="documentation-example">
+ <output-dir compare="Text">documentation-example</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/get-record-field-value">
+ <compilation-unit name="highly-nested-closed">
+ <output-dir compare="Text">highly-nested-closed</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/get-record-field-value">
+ <compilation-unit name="highly-nested-mixed">
+ <output-dir compare="Text">highly-nested-mixed</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/get-record-field-value">
+ <compilation-unit name="highly-nested-open">
+ <output-dir compare="Text">highly-nested-open</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/get-record-field-value">
+ <compilation-unit name="tiny-social-example">
+ <output-dir compare="Text">tiny-social-example</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="open-record-constructor_01">
+ <output-dir compare="Text">open-record-constructor_01</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="open-record-constructor_02">
+ <output-dir compare="Text">open-record-constructor_02</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="closed-closed-fieldname-conflict_issue173">
+ <output-dir compare="Text">closed-closed-fieldname-conflict_issue173</output-dir>
+ <expected-error>java.lang.IllegalStateException: Closed fields 0 and 1 have the same field name "name"</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="open-closed-fieldname-conflict_issue173">
+ <output-dir compare="Text">open-closed-fieldname-conflict_issue173</output-dir>
+ <expected-error>org.apache.hyracks.api.exceptions.HyracksDataException: Open field "name" has the same field name as closed field at index 0</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
+ <compilation-unit name="open-open-fieldname-conflict_issue173">
+ <output-dir compare="Text">open-open-fieldname-conflict_issue173</output-dir>
+ <expected-error>org.apache.hyracks.api.exceptions.HyracksDataException: Open fields 0 and 1 have the same field name "name"</expected-error>
+ </compilation-unit>
+ </test-case>
+ <!-- RECORD MANIPULATION TESTS -->
+ <test-case FilePath="records/record-merge">
+ <compilation-unit name="documentation-example">
+ <output-dir compare="Text">documentation-example</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/record-merge">
+ <compilation-unit name="tiny-social-example-only-records">
+ <output-dir compare="Text">tiny-social-example-only-records</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/record-merge">
+ <compilation-unit name="highly-nested-open">
+ <output-dir compare="Text">highly-nested-open</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/record-add-fields">
+ <compilation-unit name="documentation-example">
+ <output-dir compare="Text">documentation-example</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/record-add-fields">
+ <compilation-unit name="tiny-social-example-only-records">
+ <output-dir compare="Text">tiny-social-example-only-records</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/record-add-fields">
+ <compilation-unit name="highly-nested-open">
+ <output-dir compare="Text">highly-nested-open</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/record-remove-fields">
+ <compilation-unit name="documentation-example">
+ <output-dir compare="Text">documentation-example</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/record-remove-fields">
+ <compilation-unit name="tiny-social-example-only-records">
+ <output-dir compare="Text">tiny-social-example-only-records</output-dir>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records/record-remove-fields">
+ <compilation-unit name="highly-nested-open">
+ <output-dir compare="Text">highly-nested-open</output-dir>
+ </compilation-unit>
+ </test-case>
+</test-group>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm
index 1dfa743..fc49cb1 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm
@@ -1,4 +1,9 @@
{
+ "cc": {
+ "configUri": "http://127.0.0.1:19002/admin/cluster/cc/config",
+ "statsUri": "http://127.0.0.1:19002/admin/cluster/cc/stats",
+ "threadDumpUri": "http://127.0.0.1:19002/admin/cluster/cc/threaddump"
+ },
"config": {
"api.port": 19002,
"cc.java.opts": "-Xmx1024m",
@@ -58,7 +63,8 @@
"partition_0",
"partition_1"
],
- "statsUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/stats"
+ "statsUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/stats",
+ "threadDumpUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/threaddump"
},
{
"configUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/config",
@@ -67,8 +73,9 @@
"partition_2",
"partition_3"
],
- "statsUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/stats"
+ "statsUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/stats",
+ "threadDumpUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/threaddump"
}
],
"state": "ACTIVE"
-}
\ No newline at end of file
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_3/cluster_state_3.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_3/cluster_state_3.1.adm
index 2bbfa10..4357d25 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_3/cluster_state_3.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_3/cluster_state_3.1.adm
@@ -6,7 +6,8 @@
"partition_0",
"partition_1"
],
- "statsUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/stats"
+ "statsUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/stats",
+ "threadDumpUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/threaddump"
},
{
"configUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/config",
@@ -15,6 +16,7 @@
"partition_2",
"partition_3"
],
- "statsUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/stats"
+ "statsUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/stats",
+ "threadDumpUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/threaddump"
}
]}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_4/cluster_state_4.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_4/cluster_state_4.1.adm
index 819c1e7..c893534 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_4/cluster_state_4.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_4/cluster_state_4.1.adm
@@ -5,5 +5,6 @@
"partition_0",
"partition_1"
],
- "statsUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/stats"
+ "statsUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/stats",
+ "threadDumpUri": "http://127.0.0.1:19002/admin/cluster/node/asterix_nc1/threaddump"
}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_cc_1/cluster_state_cc_1.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_cc_1/cluster_state_cc_1.1.adm
new file mode 100644
index 0000000..5e1e154
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_cc_1/cluster_state_cc_1.1.adm
@@ -0,0 +1,5 @@
+{
+ "configUri": "http://127.0.0.1:19002/admin/cluster/cc/config",
+ "statsUri": "http://127.0.0.1:19002/admin/cluster/cc/stats",
+ "threadDumpUri": "http://127.0.0.1:19002/admin/cluster/cc/threaddump"
+}
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
index 103d7bd..88b47f2 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -21,6 +21,7 @@
<!ENTITY ComparisonQueries SYSTEM "queries/comparison/ComparisonQueries.xml">
<!ENTITY RecordsQueries SYSTEM "queries/records/RecordsQueries.xml">
<!ENTITY DeepEqualQueries SYSTEM "queries/comparison/deep_equal/DeepEqualQueries.xml">
+ <!ENTITY APIQueries SYSTEM "queries/api/APIQueries.xml">
]>
<test-suite
@@ -28,6 +29,7 @@
ResultOffsetPath="results"
QueryOffsetPath="queries"
QueryFileExtension=".aql">
+ &APIQueries;
<test-group name="external-library">
<test-case FilePath="external-library">
<compilation-unit name="typed_adapter">
@@ -1251,9 +1253,7 @@
</compilation-unit>
</test-case>
</test-group>
- <test-group name="comparison">
- &ComparisonQueries;
- </test-group>
+ &ComparisonQueries;
<test-group name="constructor">
<test-case FilePath="constructor">
<compilation-unit name="binary_01">
@@ -4815,12 +4815,8 @@
</compilation-unit>
</test-case>-->
</test-group>
- <test-group name="records">
- &RecordsQueries;
- </test-group>
- <test-group name="deep-equal">
- &DeepEqualQueries;
- </test-group>
+ &RecordsQueries;
+ &DeepEqualQueries;
<test-group name="scan">
<test-case FilePath="scan">
<compilation-unit name="10">
@@ -7315,27 +7311,4 @@
</compilation-unit>
</test-case>
</test-group>
- <test-group name="api">
- <test-case FilePath="api">
- <compilation-unit name="cluster_state_1">
- <output-dir compare="Text">cluster_state_1</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="api">
- <compilation-unit name="cluster_state_2">
- <output-dir compare="Text">cluster_state_2</output-dir>
- <expected-error>HTTP/1.1 404 Not Found</expected-error>
- </compilation-unit>
- </test-case>
- <test-case FilePath="api">
- <compilation-unit name="cluster_state_3">
- <output-dir compare="Text">cluster_state_3</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="api">
- <compilation-unit name="cluster_state_4">
- <output-dir compare="Text">cluster_state_4</output-dir>
- </compilation-unit>
- </test-case>
- </test-group>
</test-suite>
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/ServletUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/ServletUtil.java
index 047f010..5436dc7 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/ServletUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/ServletUtil.java
@@ -36,7 +36,8 @@
SHUTDOWN("/admin/shutdown"),
VERSION("/admin/version"),
CLUSTER_STATE("/admin/cluster"),
- CLUSTER_STATE_NODE_DETAIL("/admin/cluster/node/*");
+ CLUSTER_STATE_NODE_DETAIL("/admin/cluster/node/*"),
+ CLUSTER_STATE_CC_DETAIL("/admin/cluster/cc/*");
private final String path;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksClientInterfaceFunctions.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksClientInterfaceFunctions.java
index ca0783b..88c4edb 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksClientInterfaceFunctions.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksClientInterfaceFunctions.java
@@ -46,7 +46,8 @@
CLI_DEPLOY_BINARY,
CLI_UNDEPLOY_BINARY,
CLUSTER_SHUTDOWN,
- GET_NODE_DETAILS_JSON
+ GET_NODE_DETAILS_JSON,
+ THREAD_DUMP
}
public abstract static class Function implements Serializable {
@@ -325,4 +326,20 @@
}
}
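+ /** Client-to-CC request for a thread dump of the given node; a null node id targets the CC itself. */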
+ public static class ThreadDumpFunction extends Function {
+ private static final long serialVersionUID = 1L;
+ private final String node;
+
+ public ThreadDumpFunction(String node) {
+ this.node = node;
+ }
+
+ @Override
+ public FunctionId getFunctionId() {
+ return FunctionId.THREAD_DUMP;
+ }
+
+ public String getNode() {
+ return node;
+ }
+ }
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
index 3f453e5..86a8ceb 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
@@ -138,4 +138,12 @@
new HyracksClientInterfaceFunctions.GetNodeDetailsJSONFunction(nodeId, includeStats, includeConfig);
return (String) rpci.call(ipcHandle, gjsf);
}
+
+ @Override
+ public String getThreadDump(String node) throws Exception {
+ HyracksClientInterfaceFunctions.ThreadDumpFunction tdf =
+ new HyracksClientInterfaceFunctions.ThreadDumpFunction(node);
+ return (String) rpci.call(ipcHandle, tdf);
+ }
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksConnection.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksConnection.java
index 73813f3..4b27caf 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksConnection.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/HyracksConnection.java
@@ -197,4 +197,9 @@
public String getNodeDetailsJSON(String nodeId, boolean includeStats, boolean includeConfig) throws Exception {
return hci.getNodeDetailsJSON(nodeId, includeStats, includeConfig);
}
+
+ @Override
+ public String getThreadDump(String node) throws Exception {
+ return hci.getThreadDump(node);
+ }
}
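(Illustrative usage sketch for the new client API, not part of this patch; the CC host and client port below are assumptions.)

    // Obtain a thread dump through IHyracksClientConnection.getThreadDump().
    IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098); // assumed CC client port
    String ncDump = hcc.getThreadDump("asterix_nc1"); // JSON thread dump of a specific NC
    String ccDump = hcc.getThreadDump(null);          // a null node id targets the CC itself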
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/IHyracksClientConnection.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/IHyracksClientConnection.java
index 0690c9f..6c15da2 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/IHyracksClientConnection.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/IHyracksClientConnection.java
@@ -197,4 +197,9 @@
* @throws Exception
*/
public String getNodeDetailsJSON(String nodeId, boolean includeStats, boolean includeConfig) throws Exception;
+
+ /**
+ * Gets thread dump from the specified node as a serialized JSON string
+ */
+ public String getThreadDump(String node) throws Exception;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/IHyracksClientInterface.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/IHyracksClientInterface.java
index 4ddb81f..c2af2e7 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/IHyracksClientInterface.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/IHyracksClientInterface.java
@@ -57,4 +57,6 @@
public void stopCluster() throws Exception;
public String getNodeDetailsJSON(String nodeId, boolean includeStats, boolean includeConfig) throws Exception;
+
+ public String getThreadDump(String node) throws Exception;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
index 8dada48..ce272eb 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
@@ -22,6 +22,7 @@
import java.io.FileReader;
import java.net.InetAddress;
import java.net.InetSocketAddress;
+import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
@@ -66,6 +67,8 @@
import org.apache.hyracks.control.cc.work.GetJobInfoWork;
import org.apache.hyracks.control.cc.work.GetJobStatusWork;
import org.apache.hyracks.control.cc.work.GetNodeControllersInfoWork;
import org.apache.hyracks.control.cc.work.GetNodeDetailsJSONWork;
+import org.apache.hyracks.control.cc.work.GetThreadDumpWork;
+import org.apache.hyracks.control.cc.work.GetThreadDumpWork.ThreadDumpRun;
import org.apache.hyracks.control.cc.work.GetResultPartitionLocationsWork;
import org.apache.hyracks.control.cc.work.GetResultStatusWork;
@@ -75,6 +78,7 @@
import org.apache.hyracks.control.cc.work.NotifyDeployBinaryWork;
import org.apache.hyracks.control.cc.work.NotifyShutdownWork;
import org.apache.hyracks.control.cc.work.NotifyStateDumpResponse;
+import org.apache.hyracks.control.cc.work.NotifyThreadDumpResponse;
import org.apache.hyracks.control.cc.work.RegisterNodeWork;
import org.apache.hyracks.control.cc.work.RegisterPartitionAvailibilityWork;
import org.apache.hyracks.control.cc.work.RegisterPartitionRequestWork;
@@ -156,6 +160,8 @@
private final Map<String, StateDumpRun> stateDumpRunMap;
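+ // Pending thread dump requests, keyed by a per-request UUID generated in GetThreadDumpWork.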
+ private final Map<String, ThreadDumpRun> threadDumpRunMap;
+
private ShutdownRun shutdownCallback;
private ICCApplicationEntryPoint aep;
@@ -204,6 +210,7 @@
deploymentRunMap = new HashMap<>();
stateDumpRunMap = new HashMap<>();
+ threadDumpRunMap = Collections.synchronizedMap(new HashMap<>());
}
private static ClusterTopology computeClusterTopology(CCConfig ccConfig) throws Exception {
@@ -520,6 +527,13 @@
workQueue.schedule(new GetNodeDetailsJSONWork(ClusterControllerService.this, gndjf.getNodeId(),
gndjf.isIncludeStats(), gndjf.isIncludeConfig(), new IPCResponder<>(handle, mid)));
return;
+
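+ // Thread dump requested by a client: GetThreadDumpWork handles both NC targets and the CC (null node id).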
+ case THREAD_DUMP:
+ HyracksClientInterfaceFunctions.ThreadDumpFunction tdf =
+ (HyracksClientInterfaceFunctions.ThreadDumpFunction) fn;
+ workQueue.schedule(new GetThreadDumpWork(ClusterControllerService.this, tdf.getNode(),
+ new IPCResponder<String>(handle, mid)));
+ return;
}
try {
handle.send(mid, null, new IllegalArgumentException("Unknown function " + fn.getFunctionId()));
@@ -658,11 +672,21 @@
dsrf.getStateDumpId(), dsrf.getState()));
return;
}
+
case SHUTDOWN_RESPONSE: {
CCNCFunctions.ShutdownResponseFunction sdrf = (ShutdownResponseFunction) fn;
workQueue.schedule(new NotifyShutdownWork(ClusterControllerService.this, sdrf.getNodeId()));
return;
}
+
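+ // An NC replied with its thread dump; route the JSON back to the pending request's callback.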
+ case THREAD_DUMP_RESPONSE: {
+ CCNCFunctions.ThreadDumpResponseFunction tdrf =
+ (CCNCFunctions.ThreadDumpResponseFunction) fn;
+ workQueue.schedule(new NotifyThreadDumpResponse(ClusterControllerService.this,
+ tdrf.getRequestId(), tdrf.getThreadDumpJSON()));
+ return;
+ }
}
LOGGER.warning("Unknown function: " + fn.getFunctionId());
}
@@ -715,4 +739,12 @@
public synchronized ShutdownRun getShutdownRun() {
return shutdownCallback;
}
+
+ public void addThreadDumpRun(String requestKey, ThreadDumpRun tdr) {
+ threadDumpRunMap.put(requestKey, tdr);
+ }
+
+ public ThreadDumpRun removeThreadDumpRun(String requestKey) {
+ return threadDumpRunMap.remove(requestKey);
+ }
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java
index a848c6e..a4d44b1 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java
@@ -19,7 +19,7 @@
package org.apache.hyracks.control.cc;
import java.io.File;
-import java.util.Arrays;
+import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -33,7 +33,6 @@
import org.apache.hyracks.control.common.heartbeat.HeartbeatData;
import org.apache.hyracks.control.common.heartbeat.HeartbeatSchema;
import org.apache.hyracks.control.common.heartbeat.HeartbeatSchema.GarbageCollectorInfo;
-import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
@@ -275,7 +274,7 @@
return messagingPort;
}
- public JSONObject toSummaryJSON() throws JSONException {
+ public synchronized JSONObject toSummaryJSON() throws JSONException {
JSONObject o = new JSONObject();
o.put("node-id", ncConfig.nodeId);
o.put("heap-used", heapUsedSize[(rrdPtr + RRD_SIZE - 1) % RRD_SIZE]);
@@ -288,6 +287,7 @@
JSONObject o = new JSONObject();
o.put("node-id", ncConfig.nodeId);
+
if (includeConfig) {
o.put("os-name", osName);
o.put("arch", arch);
@@ -296,14 +296,15 @@
o.put("vm-name", vmName);
o.put("vm-version", vmVersion);
o.put("vm-vendor", vmVendor);
- o.put("classpath", new JSONArray(Arrays.asList(classpath.split(File.pathSeparator))));
- o.put("library-path", new JSONArray(Arrays.asList(libraryPath.split(File.pathSeparator))));
- o.put("boot-classpath", new JSONArray(Arrays.asList(bootClasspath.split(File.pathSeparator))));
- o.put("input-arguments", new JSONArray(inputArguments));
- o.put("system-properties", new JSONObject(systemProperties));
+ o.put("classpath", classpath.split(File.pathSeparator));
+ o.put("library-path", libraryPath.split(File.pathSeparator));
+ o.put("boot-classpath", bootClasspath.split(File.pathSeparator));
+ o.put("input-arguments", inputArguments);
+ o.put("system-properties", systemProperties);
o.put("pid", pid);
}
if (includeStats) {
+ o.put("date", new Date());
o.put("rrd-ptr", rrdPtr);
o.put("heartbeat-times", hbTime);
o.put("heap-init-sizes", heapInitSize);
@@ -338,4 +339,5 @@
return o;
}
+
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
index 6d20874..dab41ba 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
@@ -18,14 +18,34 @@
*/
package org.apache.hyracks.control.cc.work;
-import org.apache.hyracks.control.common.work.IPCResponder;
-import org.json.JSONObject;
+import java.io.File;
+import java.lang.management.GarbageCollectorMXBean;
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
+import java.lang.management.MemoryUsage;
+import java.lang.management.OperatingSystemMXBean;
+import java.lang.management.RuntimeMXBean;
+import java.lang.management.ThreadMXBean;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.NodeControllerState;
+import org.apache.hyracks.control.common.utils.PidHelper;
+import org.apache.hyracks.control.common.work.IPCResponder;
import org.apache.hyracks.control.common.work.SynchronizableWork;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.kohsuke.args4j.Option;
public class GetNodeDetailsJSONWork extends SynchronizableWork {
+ private static final Logger LOGGER = Logger.getLogger(GetNodeDetailsJSONWork.class.getName());
private final ClusterControllerService ccs;
private final String nodeId;
private final boolean includeStats;
@@ -49,9 +69,20 @@
@Override
protected void doRun() throws Exception {
- NodeControllerState ncs = ccs.getNodeMap().get(nodeId);
- if (ncs != null) {
- detail = ncs.toDetailedJSON(includeStats, includeConfig);
+ if (nodeId == null) {
+ // null nodeId is a request for CC
+ detail = getCCDetails();
+ if (includeConfig) {
+ addIni(detail, ccs.getCCConfig());
+ }
+ } else {
+ NodeControllerState ncs = ccs.getNodeMap().get(nodeId);
+ if (ncs != null) {
+ detail = ncs.toDetailedJSON(includeStats, includeConfig);
+ if (includeConfig) {
+ addIni(detail, ncs.getNCConfig());
+ }
+ }
}
if (callback != null) {
@@ -59,6 +90,89 @@
}
}
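+ // Builds the CC's own config/stats detail from the JVM management beans, mirroring NodeControllerState.toDetailedJSON().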
+ private JSONObject getCCDetails() throws JSONException {
+ JSONObject o = new JSONObject();
+ MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
+ List<GarbageCollectorMXBean> gcMXBeans = ManagementFactory.getGarbageCollectorMXBeans();
+ ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean();
+ OperatingSystemMXBean osMXBean = ManagementFactory.getOperatingSystemMXBean();
+ RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean();
+
+ if (includeConfig) {
+ o.put("os_name", osMXBean.getName());
+ o.put("arch", osMXBean.getArch());
+ o.put("os_version", osMXBean.getVersion());
+ o.put("num_processors", osMXBean.getAvailableProcessors());
+ o.put("vm_name", runtimeMXBean.getVmName());
+ o.put("vm_version", runtimeMXBean.getVmVersion());
+ o.put("vm_vendor", runtimeMXBean.getVmVendor());
+ o.put("classpath", runtimeMXBean.getClassPath().split(File.pathSeparator));
+ o.put("library_path", runtimeMXBean.getLibraryPath().split(File.pathSeparator));
+ o.put("boot_classpath", runtimeMXBean.getBootClassPath().split(File.pathSeparator));
+ o.put("input_arguments", runtimeMXBean.getInputArguments());
+ o.put("system_properties", runtimeMXBean.getSystemProperties());
+ o.put("pid", PidHelper.getPid());
+ }
+ if (includeStats) {
+ MemoryUsage heapUsage = memoryMXBean.getHeapMemoryUsage();
+ MemoryUsage nonheapUsage = memoryMXBean.getNonHeapMemoryUsage();
+
+ List<JSONObject> gcs = new ArrayList<>();
+
+ for (GarbageCollectorMXBean gcMXBean : gcMXBeans) {
+ JSONObject gc = new JSONObject();
+ gc.put("name", gcMXBean.getName());
+ gc.put("collection-time", gcMXBean.getCollectionTime());
+ gc.put("collection-count", gcMXBean.getCollectionCount());
+ gcs.add(gc);
+ }
+ o.put("gcs", gcs);
+
+ o.put("date", new Date());
+ o.put("heap_init_size", heapUsage.getInit());
+ o.put("heap_used_size", heapUsage.getUsed());
+ o.put("heap_committed_size", heapUsage.getCommitted());
+ o.put("heap_max_size", heapUsage.getMax());
+ o.put("nonheap_init_size", nonheapUsage.getInit());
+ o.put("nonheap_used_size", nonheapUsage.getUsed());
+ o.put("nonheap_committed_size", nonheapUsage.getCommitted());
+ o.put("nonheap_max_size", nonheapUsage.getMax());
+ o.put("thread_count", threadMXBean.getThreadCount());
+ o.put("peak_thread_count", threadMXBean.getPeakThreadCount());
+ o.put("started_thread_count", threadMXBean.getTotalStartedThreadCount());
+ o.put("system_load_average", osMXBean.getSystemLoadAverage());
+ }
+ return o;
+ }
+
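+ // Exposes the effective ini settings by reflecting over the config bean's args4j @Option fields.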
+ private static void addIni(JSONObject o, Object configBean) throws JSONException {
+ Map<String, Object> iniMap = new HashMap<>();
+ for (Field f : configBean.getClass().getFields()) {
+ Option option = f.getAnnotation(Option.class);
+ if (option == null) {
+ continue;
+ }
+ final String optionName = option.name();
+ Object value = null;
+ try {
+ value = f.get(configBean);
+ } catch (IllegalAccessException e) {
+ LOGGER.log(Level.WARNING, "Unable to access ini option " + optionName, e);
+ }
+ if (value != null) {
+ if ("--".equals(optionName)) {
+ iniMap.put("app_args", value);
+ } else {
+ iniMap.put(optionName.substring(1).replace('-', '_'),
+ "-iodevices".equals(optionName)
+ ? String.valueOf(value).split(",")
+ : value);
+ }
+ }
+ }
+ o.put("ini", iniMap);
+ }
+
public JSONObject getDetail() {
return detail;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
new file mode 100644
index 0000000..b18d388
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.cc.work;
+
+import java.lang.management.ManagementFactory;
+import java.util.UUID;
+
+import org.apache.hyracks.control.cc.ClusterControllerService;
+import org.apache.hyracks.control.cc.NodeControllerState;
+import org.apache.hyracks.control.common.work.IResultCallback;
+import org.apache.hyracks.control.common.work.ThreadDumpWork;
+
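+/** CC work that dumps the CC's own threads for a null node id, or forwards the request to the target NC and completes the callback when its response arrives. */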
+public class GetThreadDumpWork extends ThreadDumpWork {
+ private final ClusterControllerService ccs;
+ private final String nodeId;
+ private final IResultCallback<String> callback;
+ private final ThreadDumpRun run;
+
+ public GetThreadDumpWork(ClusterControllerService ccs, String nodeId, IResultCallback<String> callback) {
+ this.ccs = ccs;
+ this.nodeId = nodeId;
+ this.callback = callback;
+ this.run = new ThreadDumpRun(UUID.randomUUID().toString());
+ }
+
+ @Override
+ protected void doRun() throws Exception {
+ if (nodeId == null) {
+ // null nodeId means the request is for the cluster controller
+ callback.setValue(takeDump(ManagementFactory.getThreadMXBean()));
+ } else {
+ final NodeControllerState ncState = ccs.getNodeMap().get(nodeId);
+ if (ncState == null) {
+ // bad node id, reply with null immediately
+ callback.setValue(null);
+ } else {
+ ccs.addThreadDumpRun(run.getRequestId(), run);
+ ncState.getNodeController().takeThreadDump(run.getRequestId());
+ }
+ }
+ }
+
+ public class ThreadDumpRun {
+ private final String requestId;
+
+ public ThreadDumpRun(String requestId) {
+ this.requestId = requestId;
+ }
+
+ public String getRequestId() {
+ return requestId;
+ }
+
+ public synchronized void notifyThreadDumpReceived(String threadDumpJSON) {
+ callback.setValue(threadDumpJSON);
+ }
+ }
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyThreadDumpResponse.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyThreadDumpResponse.java
new file mode 100644
index 0000000..bbdf211
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/NotifyThreadDumpResponse.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.cc.work;
+
+import org.apache.hyracks.control.cc.ClusterControllerService;
+import org.apache.hyracks.control.common.work.AbstractWork;
+
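+/** Scheduled on the CC when a THREAD_DUMP_RESPONSE arrives; delivers the JSON to the matching pending GetThreadDumpWork.ThreadDumpRun. */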
+public class NotifyThreadDumpResponse extends AbstractWork {
+
+ private final ClusterControllerService ccs;
+
+ private final String requestId;
+ private final String threadDumpJSON;
+
+ public NotifyThreadDumpResponse(ClusterControllerService ccs, String requestId, String threadDumpJSON) {
+ this.ccs = ccs;
+ this.requestId = requestId;
+ this.threadDumpJSON = threadDumpJSON;
+ }
+
+ @Override
+ public void run() {
+ // Guard against a response for an unknown or already-completed request id.
+ GetThreadDumpWork.ThreadDumpRun tdr = ccs.removeThreadDumpRun(requestId);
+ if (tdr != null) {
+ tdr.notifyThreadDumpReceived(threadDumpJSON);
+ }
+ }
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java
index ec1613d..a0c0f95 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java
@@ -70,4 +70,6 @@
public void reportResultPartitionFailure(JobId jobId, ResultSetId rsId, int partition) throws Exception;
public void getNodeControllerInfos() throws Exception;
+
+ public void notifyThreadDump(String nodeId, String requestId, String threadDumpJSON) throws Exception;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java
index 75c3127..bd550b5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java
@@ -54,4 +54,6 @@
public void shutdown() throws Exception;
public void sendApplicationMessageToNC(byte[] data, DeploymentId deploymentId, String nodeId) throws Exception;
+
+ public void takeThreadDump(String requestId) throws Exception;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java
index 96dca4e..aeb2de7 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/CCNCFunctions.java
@@ -101,6 +101,9 @@
STATE_DUMP_REQUEST,
STATE_DUMP_RESPONSE,
+ THREAD_DUMP_REQUEST,
+ THREAD_DUMP_RESPONSE,
+
OTHER
}
@@ -895,6 +898,54 @@
}
}
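+ /** CC-to-NC request asking a node controller to take a thread dump, correlated by request id. */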
+ public static class ThreadDumpRequestFunction extends Function {
+ private static final long serialVersionUID = 1L;
+ private final String requestId;
+
+ public ThreadDumpRequestFunction(String requestId) {
+ this.requestId = requestId;
+ }
+
+ @Override
+ public FunctionId getFunctionId() {
+ return FunctionId.THREAD_DUMP_REQUEST;
+ }
+
+ public String getRequestId() {
+ return requestId;
+ }
+ }
+
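+ /** NC-to-CC response carrying a node's thread dump as a serialized JSON string. */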
+ public static class ThreadDumpResponseFunction extends Function {
+ private static final long serialVersionUID = 1L;
+ private final String nodeId;
+ private final String requestId;
+ private final String threadDumpJSON;
+
+ public ThreadDumpResponseFunction(String nodeId, String requestId, String threadDumpJSON) {
+ this.nodeId = nodeId;
+ this.requestId = requestId;
+ this.threadDumpJSON = threadDumpJSON;
+ }
+
+ @Override
+ public FunctionId getFunctionId() {
+ return FunctionId.THREAD_DUMP_RESPONSE;
+ }
+
+ public String getNodeId() {
+ return nodeId;
+ }
+
+ public String getRequestId() {
+ return requestId;
+ }
+
+ public String getThreadDumpJSON() {
+ return threadDumpJSON;
+ }
+ }
+
public static class ReportPartitionAvailabilityFunction extends Function {
private static final long serialVersionUID = 1L;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
index 416b064..ac6fc2c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
@@ -116,24 +116,27 @@
ipcHandle.send(-1, fn, null);
}
- public void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult, boolean emptyResult, int partition,
- int nPartitions, NetworkAddress networkAddress) throws Exception {
- CCNCFunctions.RegisterResultPartitionLocationFunction fn = new CCNCFunctions.RegisterResultPartitionLocationFunction(
- jobId, rsId, orderedResult, emptyResult, partition, nPartitions, networkAddress);
+ @Override
+ public void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult,
+ boolean emptyResult, int partition, int nPartitions,
+ NetworkAddress networkAddress) throws Exception {
+ CCNCFunctions.RegisterResultPartitionLocationFunction fn =
+ new CCNCFunctions.RegisterResultPartitionLocationFunction(jobId, rsId, orderedResult, emptyResult,
+ partition, nPartitions, networkAddress);
ipcHandle.send(-1, fn, null);
}
@Override
public void reportResultPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) throws Exception {
- CCNCFunctions.ReportResultPartitionWriteCompletionFunction fn = new CCNCFunctions.ReportResultPartitionWriteCompletionFunction(
- jobId, rsId, partition);
+ CCNCFunctions.ReportResultPartitionWriteCompletionFunction fn =
+ new CCNCFunctions.ReportResultPartitionWriteCompletionFunction(jobId, rsId, partition);
ipcHandle.send(-1, fn, null);
}
@Override
public void reportResultPartitionFailure(JobId jobId, ResultSetId rsId, int partition) throws Exception {
- CCNCFunctions.ReportResultPartitionFailureFunction fn = new CCNCFunctions.ReportResultPartitionFailureFunction(
- jobId, rsId, partition);
+ CCNCFunctions.ReportResultPartitionFailureFunction fn =
+ new CCNCFunctions.ReportResultPartitionFailureFunction(jobId, rsId, partition);
ipcHandle.send(-1, fn, null);
}
@@ -144,14 +147,20 @@
@Override
public void notifyStateDump(String nodeId, String stateDumpId, String state) throws Exception {
- CCNCFunctions.StateDumpResponseFunction fn = new CCNCFunctions.StateDumpResponseFunction(nodeId, stateDumpId,
- state);
+ CCNCFunctions.StateDumpResponseFunction fn =
+ new CCNCFunctions.StateDumpResponseFunction(nodeId, stateDumpId, state);
ipcHandle.send(-1, fn, null);
}
@Override
public void notifyShutdown(String nodeId) throws Exception{
CCNCFunctions.ShutdownResponseFunction sdrf = new CCNCFunctions.ShutdownResponseFunction(nodeId);
- ipcHandle.send(-1,sdrf,null);
+ ipcHandle.send(-1, sdrf, null);
}
+ @Override
+ public void notifyThreadDump(String nodeId, String requestId, String threadDumpJSON) throws Exception {
+ CCNCFunctions.ThreadDumpResponseFunction tdrf =
+ new CCNCFunctions.ThreadDumpResponseFunction(nodeId, requestId, threadDumpJSON);
+ ipcHandle.send(-1, tdrf, null);
+ }
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
index 841c889..e4682dc 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
@@ -101,4 +101,10 @@
deploymentId, nodeId);
ipcHandle.send(-1, fn, null);
}
+
+ @Override
+ public void takeThreadDump(String requestId) throws Exception {
+ CCNCFunctions.ThreadDumpRequestFunction fn = new CCNCFunctions.ThreadDumpRequestFunction(requestId);
+ ipcHandle.send(-1, fn, null);
+ }
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/ThreadDumpWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/ThreadDumpWork.java
new file mode 100644
index 0000000..bf1965d
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/ThreadDumpWork.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.common.work;
+
+import java.lang.management.ThreadInfo;
+import java.lang.management.ThreadMXBean;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+public abstract class ThreadDumpWork extends SynchronizableWork {
+
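+ /** Serializes all live JVM threads (stacks, locks, and deadlock information) into a JSON string. */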
+ protected String takeDump(ThreadMXBean threadMXBean) throws JSONException {
+ ThreadInfo[] threadInfos = threadMXBean.dumpAllThreads(true, true);
+ List<Map<String, Object>> threads = new ArrayList<>();
+
+ for (ThreadInfo thread : threadInfos) {
+ Map<String, Object> threadMap = new HashMap<>();
+ threadMap.put("name", thread.getThreadName());
+ threadMap.put("id", thread.getThreadId());
+ threadMap.put("state", thread.getThreadState().name());
+ List<String> stacktrace = new ArrayList<>();
+ for (StackTraceElement element : thread.getStackTrace()) {
+ stacktrace.add(element.toString());
+ }
+ threadMap.put("stack", stacktrace);
+
+ if (thread.getLockName() != null) {
+ threadMap.put("lock_name", thread.getLockName());
+ }
+ if (thread.getLockOwnerId() != -1) {
+ threadMap.put("lock_owner_id", thread.getLockOwnerId());
+ }
+ if (thread.getBlockedTime() > 0) {
+ threadMap.put("blocked_time", thread.getBlockedTime());
+ }
+ if (thread.getBlockedCount() > 0) {
+ threadMap.put("blocked_count", thread.getBlockedCount());
+ }
+ if (thread.getLockedMonitors().length > 0) {
+ threadMap.put("locked_monitors", thread.getLockedMonitors());
+ }
+ if (thread.getLockedSynchronizers().length > 0) {
+ threadMap.put("locked_synchronizers", thread.getLockedSynchronizers());
+ }
+ threads.add(threadMap);
+ }
+ JSONObject json = new JSONObject();
+ json.put("date", new Date());
+ json.put("threads", threads);
+
+ long[] deadlockedThreads = threadMXBean.findDeadlockedThreads();
+ long[] monitorDeadlockedThreads = threadMXBean.findMonitorDeadlockedThreads();
+ if (deadlockedThreads != null && deadlockedThreads.length > 0) {
+ json.put("deadlocked_thread_ids", deadlockedThreads);
+ }
+ if (monitorDeadlockedThreads != null && monitorDeadlockedThreads.length > 0) {
+ json.put("monitor_deadlocked_thread_ids", monitorDeadlockedThreads);
+ }
+ return json.toString();
+ }
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
index edadf57..dbf3af0 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
@@ -88,6 +88,7 @@
+import org.apache.hyracks.control.nc.work.NodeThreadDumpWork;
import org.apache.hyracks.control.nc.work.ShutdownWork;
import org.apache.hyracks.control.nc.work.StartTasksWork;
import org.apache.hyracks.control.nc.work.StateDumpWork;
import org.apache.hyracks.control.nc.work.UnDeployBinaryWork;
import org.apache.hyracks.ipc.api.IIPCHandle;
import org.apache.hyracks.ipc.api.IIPCI;
@@ -416,6 +417,10 @@
return queue;
}
+ public ThreadMXBean getThreadMXBean() {
+ return threadMXBean;
+ }
+
private class HeartbeatTask extends TimerTask {
private IClusterController cc;
@@ -509,72 +514,71 @@
Exception exception) {
CCNCFunctions.Function fn = (CCNCFunctions.Function) payload;
switch (fn.getFunctionId()) {
- case SEND_APPLICATION_MESSAGE: {
- CCNCFunctions.SendApplicationMessageFunction amf = (CCNCFunctions.SendApplicationMessageFunction) fn;
+ case SEND_APPLICATION_MESSAGE:
+ CCNCFunctions.SendApplicationMessageFunction amf =
+ (CCNCFunctions.SendApplicationMessageFunction) fn;
queue.schedule(new ApplicationMessageWork(NodeControllerService.this, amf.getMessage(),
amf.getDeploymentId(), amf.getNodeId()));
return;
- }
- case START_TASKS: {
+
+ case START_TASKS:
CCNCFunctions.StartTasksFunction stf = (CCNCFunctions.StartTasksFunction) fn;
queue.schedule(new StartTasksWork(NodeControllerService.this, stf.getDeploymentId(), stf.getJobId(),
stf.getPlanBytes(), stf.getTaskDescriptors(), stf.getConnectorPolicies(), stf.getFlags()));
return;
- }
- case ABORT_TASKS: {
+ case ABORT_TASKS:
CCNCFunctions.AbortTasksFunction atf = (CCNCFunctions.AbortTasksFunction) fn;
queue.schedule(new AbortTasksWork(NodeControllerService.this, atf.getJobId(), atf.getTasks()));
return;
- }
- case CLEANUP_JOBLET: {
+ case CLEANUP_JOBLET:
CCNCFunctions.CleanupJobletFunction cjf = (CCNCFunctions.CleanupJobletFunction) fn;
queue.schedule(new CleanupJobletWork(NodeControllerService.this, cjf.getJobId(), cjf.getStatus()));
return;
- }
- case REPORT_PARTITION_AVAILABILITY: {
- CCNCFunctions.ReportPartitionAvailabilityFunction rpaf = (CCNCFunctions.ReportPartitionAvailabilityFunction) fn;
+ case REPORT_PARTITION_AVAILABILITY:
+ CCNCFunctions.ReportPartitionAvailabilityFunction rpaf =
+ (CCNCFunctions.ReportPartitionAvailabilityFunction) fn;
queue.schedule(new ReportPartitionAvailabilityWork(NodeControllerService.this,
rpaf.getPartitionId(), rpaf.getNetworkAddress()));
return;
- }
- case NODE_REGISTRATION_RESULT: {
+ case NODE_REGISTRATION_RESULT:
CCNCFunctions.NodeRegistrationResult nrrf = (CCNCFunctions.NodeRegistrationResult) fn;
setNodeRegistrationResult(nrrf.getNodeParameters(), nrrf.getException());
return;
- }
- case GET_NODE_CONTROLLERS_INFO_RESPONSE: {
- CCNCFunctions.GetNodeControllersInfoResponseFunction gncirf = (CCNCFunctions.GetNodeControllersInfoResponseFunction) fn;
+ case GET_NODE_CONTROLLERS_INFO_RESPONSE:
+ CCNCFunctions.GetNodeControllersInfoResponseFunction gncirf =
+ (CCNCFunctions.GetNodeControllersInfoResponseFunction) fn;
setNodeControllersInfo(gncirf.getNodeControllerInfos());
return;
- }
- case DEPLOY_BINARY: {
- CCNCFunctions.DeployBinaryFunction ndbf = (CCNCFunctions.DeployBinaryFunction) fn;
- queue.schedule(new DeployBinaryWork(NodeControllerService.this, ndbf.getDeploymentId(),
- ndbf.getBinaryURLs()));
+ case DEPLOY_BINARY:
+ CCNCFunctions.DeployBinaryFunction dbf = (CCNCFunctions.DeployBinaryFunction) fn;
+ queue.schedule(new DeployBinaryWork(NodeControllerService.this, dbf.getDeploymentId(),
+ dbf.getBinaryURLs()));
return;
- }
- case UNDEPLOY_BINARY: {
+ case UNDEPLOY_BINARY:
CCNCFunctions.UnDeployBinaryFunction ndbf = (CCNCFunctions.UnDeployBinaryFunction) fn;
queue.schedule(new UnDeployBinaryWork(NodeControllerService.this, ndbf.getDeploymentId()));
return;
- }
- case STATE_DUMP_REQUEST: {
+ case STATE_DUMP_REQUEST:
final CCNCFunctions.StateDumpRequestFunction dsrf = (StateDumpRequestFunction) fn;
queue.schedule(new StateDumpWork(NodeControllerService.this, dsrf.getStateDumpId()));
return;
- }
- case SHUTDOWN_REQUEST: {
+
+ case SHUTDOWN_REQUEST:
queue.schedule(new ShutdownWork(NodeControllerService.this));
return;
- }
+
+ case THREAD_DUMP_REQUEST:
+ final CCNCFunctions.ThreadDumpRequestFunction tdrf = (CCNCFunctions.ThreadDumpRequestFunction) fn;
+ queue.schedule(new NodeThreadDumpWork(NodeControllerService.this, tdrf.getRequestId()));
+ return;
}
throw new IllegalArgumentException("Unknown function: " + fn.getFunctionId());
@@ -594,7 +598,7 @@
}
/**
- * Shutdown hook that invokes {@link NCApplicationEntryPoint#stop() stop} method.
+ * Shutdown hook that invokes {@link NodeControllerService#stop() stop} method.
*/
private static class JVMShutdownHook extends Thread {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NodeThreadDumpWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NodeThreadDumpWork.java
new file mode 100644
index 0000000..1fc4690
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/NodeThreadDumpWork.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.nc.work;
+
+import org.apache.hyracks.control.common.work.ThreadDumpWork;
+import org.apache.hyracks.control.nc.NodeControllerService;
+
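+/** NC-side work that takes a local thread dump and reports it back to the CC under the originating request id. */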
+public class NodeThreadDumpWork extends ThreadDumpWork {
+ private final NodeControllerService ncs;
+ private final String requestId;
+
+ public NodeThreadDumpWork(NodeControllerService ncs, String requestId) {
+ this.ncs = ncs;
+ this.requestId = requestId;
+ }
+
+ @Override
+ protected void doRun() throws Exception {
+ final String result = takeDump(ncs.getThreadMXBean());
+
+ ncs.getClusterController().notifyThreadDump(
+ ncs.getApplicationContext().getNodeId(), requestId, result);
+ }
+}