Configuration Revamp

- Return ini parameter names instead of managix option names in the ini
  section of node / cc details
- Normalize command-line and ini-file configuration parameter names
- Eliminate unused parameters
- Add ini validation
- Migrate *DB parameters out of [app] and into the nc / cc sections as
  appropriate
- Eliminate the [app] section; cluster-wide configuration now lives in
  [common] (illustrative layout sketched below)
- Sort properties alphabetically when returned by the HTTP API
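
For illustration only, not part of this change: a hypothetical ini layout
after the revamp, with cluster-wide options in [common] and
controller-specific options in the cc / nc sections. The node name "red" and
the option names shown are illustrative assumptions, not taken from this diff.

    [cc]
    address = 127.0.0.1

    [nc/red]
    txn.log.dir = /tmp/asterixdb/red/txnlog   ; per-NC *DB parameter, formerly under [app]
    iodevices = /tmp/asterixdb/red

    [common]
    log.level = INFO                          ; cluster-wide setting, replaces [app]
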
Change-Id: I95b7e0bd4538ef42817c8826e76412150074b754
Reviewed-on: https://asterix-gerrit.ics.uci.edu/1487
Reviewed-by: Michael Blow <mblow@apache.org>
Integration-Tests: Michael Blow <mblow@apache.org>
Tested-by: Michael Blow <mblow@apache.org>
diff --git a/asterixdb/asterix-app/pom.xml b/asterixdb/asterix-app/pom.xml
index 74102a1..ede8cb1 100644
--- a/asterixdb/asterix-app/pom.xml
+++ b/asterixdb/asterix-app/pom.xml
@@ -149,11 +149,11 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<configuration>
- <ignoredUsedUndeclaredDependencies>
+ <ignoredUsedUndeclaredDependencies combine.children="append">
<ignoredUsedUndeclaredDependency>commons-logging:commons-logging-api:*</ignoredUsedUndeclaredDependency>
<ignoredUsedUndeclaredDependency>org.apache.hive:hive-exec:*</ignoredUsedUndeclaredDependency>
</ignoredUsedUndeclaredDependencies>
- <usedDependencies>
+ <usedDependencies combine.children="append">
<usedDependency>org.apache.hadoop:hadoop-common</usedDependency>
<usedDependency>org.apache.asterix:asterix-external-data</usedDependency>
</usedDependencies>
diff --git a/asterixdb/asterix-app/scripts/asterix/startnc.sh b/asterixdb/asterix-app/scripts/asterix/startnc.sh
index b4b9c3c..43f9e2a 100644
--- a/asterixdb/asterix-app/scripts/asterix/startnc.sh
+++ b/asterixdb/asterix-app/scripts/asterix/startnc.sh
@@ -27,5 +27,5 @@
export JAVA_OPTS="-Xmx10g -Djava.net.preferIPv4Stack=true -Djava.io.tmpdir=/mnt/data/sdd/space/onose/tmp"
-echo $HYRACKS_HOME/hyracks-server/target/hyracks-server-0.1.3.1-binary-assembly/bin/hyracksnc -cc-host 10.1.0.1 -cc-port 2222 -data-ip-address $IPADDR -node-id $NODEID
-$HYRACKS_HOME/hyracks-server/target/hyracks-server-0.1.3.1-binary-assembly/bin/hyracksnc -cc-host 10.1.0.1 -cc-port 2222 -data-ip-address $IPADDR -node-id $NODEID &> $LOGSDIR/$NODEID.log &
+echo $HYRACKS_HOME/hyracks-server/target/hyracks-server-0.1.3.1-binary-assembly/bin/hyracksnc -cluster-address 10.1.0.1 -cluster-port 2222 -data-listen-address $IPADDR -node-id $NODEID
+$HYRACKS_HOME/hyracks-server/target/hyracks-server-0.1.3.1-binary-assembly/bin/hyracksnc -cluster-address 10.1.0.1 -cluster-port 2222 -data-listen-address $IPADDR -node-id $NODEID &> $LOGSDIR/$NODEID.log &
diff --git a/asterixdb/asterix-app/scripts/idefix/startnc1.sh b/asterixdb/asterix-app/scripts/idefix/startnc1.sh
index e17253a..ae15596 100644
--- a/asterixdb/asterix-app/scripts/idefix/startnc1.sh
+++ b/asterixdb/asterix-app/scripts/idefix/startnc1.sh
@@ -22,4 +22,4 @@
export JAVA_OPTS="-DAsterixConfigFileName=test.properties -Djava.util.logging.config.file=/home/nicnic/Work/Asterix/hyracks/logging.properties"
export HYRACKS_HOME="/home/nicnic/workspace/hyracks/tags/hyracks-0.1.5"
-bash ${HYRACKS_HOME}/hyracks-server/target/appassembler/bin/hyracksnc -cc-host 127.0.0.1 -data-ip-address 127.0.0.1 -node-id "nc1" $*
+bash ${HYRACKS_HOME}/hyracks-server/target/appassembler/bin/hyracksnc -cluster-address 127.0.0.1 -data-listen-address 127.0.0.1 -node-id "nc1" $*
diff --git a/asterixdb/asterix-app/scripts/idefix/startnc2.sh b/asterixdb/asterix-app/scripts/idefix/startnc2.sh
index 2074cd7..4a3e370 100644
--- a/asterixdb/asterix-app/scripts/idefix/startnc2.sh
+++ b/asterixdb/asterix-app/scripts/idefix/startnc2.sh
@@ -20,5 +20,5 @@
export JAVA_OPTS="-DAsterixConfigFileName=test.properties -Djava.util.logging.config.file=/home/nicnic/Work/Asterix/hyracks/logging.properties"
export HYRACKS_HOME="/home/nicnic/workspace/hyracks/tags/hyracks-0.1.5"
-bash ${HYRACKS_HOME}/hyracks-server/target/appassembler/bin/hyracksnc -cc-host 127.0.0.1 -data-ip-address 127.0.0.1 -node-id "nc2" $*
+bash ${HYRACKS_HOME}/hyracks-server/target/appassembler/bin/hyracksnc -cluster-address 127.0.0.1 -data-listen-address 127.0.0.1 -node-id "nc2" $*
diff --git a/asterixdb/asterix-app/scripts/rainbow/startnc.sh b/asterixdb/asterix-app/scripts/rainbow/startnc.sh
index 5f1dadf..3d7b75e 100644
--- a/asterixdb/asterix-app/scripts/rainbow/startnc.sh
+++ b/asterixdb/asterix-app/scripts/rainbow/startnc.sh
@@ -27,5 +27,5 @@
export JAVA_OPTS="-DNodeControllerDataPath=/tmp/ncX/"
-echo $HYRACKS_HOME/hyracks-server/target/hyracks-server-0.1.3.1-binary-assembly/bin/hyracksnc -cc-host 128.195.52.177 -cc-port 2222 -data-ip-address $IPADDR -node-id $NODEID
-$HYRACKS_HOME/hyracks-server/target/hyracks-server-0.1.3.1-binary-assembly/bin/hyracksnc -cc-host 128.195.52.177 -cc-port 2222 -data-ip-address $IPADDR -node-id $NODEID &> $LOGSDIR/$NODEID.log &
+echo $HYRACKS_HOME/hyracks-server/target/hyracks-server-0.1.3.1-binary-assembly/bin/hyracksnc -cluster-address 128.195.52.177 -cluster-port 2222 -data-listen-address $IPADDR -node-id $NODEID
+$HYRACKS_HOME/hyracks-server/target/hyracks-server-0.1.3.1-binary-assembly/bin/hyracksnc -cluster-address 128.195.52.177 -cluster-port 2222 -data-listen-address $IPADDR -node-id $NODEID &> $LOGSDIR/$NODEID.log &
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
index 0f16179..0759599 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
@@ -34,9 +34,8 @@
import org.apache.asterix.app.result.ResultUtil;
import org.apache.asterix.common.config.CompilerProperties;
import org.apache.asterix.common.config.ExternalProperties;
-import org.apache.asterix.common.config.IPropertyInterpreter;
import org.apache.asterix.common.config.OptimizationConfUtil;
-import org.apache.asterix.common.config.PropertyInterpreters;
+import org.apache.hyracks.control.common.config.OptionTypes;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.utils.Job;
@@ -96,6 +95,7 @@
import org.apache.hyracks.api.job.JobSpecification;
import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.hyracks.api.config.IOptionType;
/**
* Provides helper methods for compilation of a query into a JobSpec and submission
@@ -451,16 +451,15 @@
// Gets the frame limit.
private int getFrameLimit(String parameter, long memBudgetInConfiguration, int frameSize) {
- IPropertyInterpreter<Long> longBytePropertyInterpreter = PropertyInterpreters.getLongBytePropertyInterpreter();
+ IOptionType<Long> longBytePropertyInterpreter = OptionTypes.LONG_BYTE_UNIT;
long memBudget =
- parameter == null ? memBudgetInConfiguration : longBytePropertyInterpreter.interpret(parameter);
+ parameter == null ? memBudgetInConfiguration : longBytePropertyInterpreter.parse(parameter);
return (int) (memBudget / frameSize);
}
// Gets the parallelism parameter.
private int getParallelism(String parameter, int parallelismInConfiguration) {
- IPropertyInterpreter<Integer> integerIPropertyInterpreter =
- PropertyInterpreters.getIntegerPropertyInterpreter();
- return parameter == null ? parallelismInConfiguration : integerIPropertyInterpreter.interpret(parameter);
+ IOptionType<Integer> integerIPropertyInterpreter = OptionTypes.INTEGER;
+ return parameter == null ? parallelismInConfiguration : integerIPropertyInterpreter.parse(parameter);
}
}
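
For illustration only, not part of this change: a minimal sketch of the
pattern introduced in the hunk above, where the removed asterix
PropertyInterpreters are replaced by the shared Hyracks option types. The
"64MB" value string and the 32 KB frame size are assumptions made for the
example; the exact value syntax accepted by LONG_BYTE_UNIT is not shown in
this diff.

    import org.apache.hyracks.api.config.IOptionType;
    import org.apache.hyracks.control.common.config.OptionTypes;

    public class FrameLimitSketch {
        // Mirrors getFrameLimit above: parse an optional per-query memory
        // budget with the Hyracks option type, falling back to the configured
        // budget, then convert it to a frame count.
        static int frameLimit(String parameter, long configuredBudget, int frameSize) {
            IOptionType<Long> byteUnit = OptionTypes.LONG_BYTE_UNIT;
            long budget = parameter == null ? configuredBudget : byteUnit.parse(parameter);
            return (int) (budget / frameSize);
        }

        public static void main(String[] args) {
            System.out.println(frameLimit(null, 134217728L, 32768));   // configured budget only
            System.out.println(frameLimit("64MB", 134217728L, 32768)); // hypothetical value string
        }
    }
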
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
index 54804a1..fbb2208 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -18,6 +18,17 @@
*/
package org.apache.asterix.api.common;
+import static org.apache.asterix.api.common.AsterixHyracksIntegrationUtil.LoggerHolder.LOGGER;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.Inet4Address;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.config.PropertiesAccessor;
@@ -26,28 +37,19 @@
import org.apache.asterix.hyracks.bootstrap.NCApplicationEntryPoint;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.commons.io.FileUtils;
+import org.apache.hyracks.api.application.ICCApplicationEntryPoint;
+import org.apache.hyracks.api.application.INCApplicationEntryPoint;
import org.apache.hyracks.api.client.HyracksConnection;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.job.JobFlag;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.control.cc.ClusterControllerService;
+import org.apache.hyracks.control.common.config.ConfigManager;
import org.apache.hyracks.control.common.controllers.CCConfig;
import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.hyracks.control.nc.NodeControllerService;
-import java.io.File;
-import java.io.IOException;
-import java.net.Inet4Address;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import static org.apache.asterix.api.common.AsterixHyracksIntegrationUtil.LoggerHolder.LOGGER;
-
public class AsterixHyracksIntegrationUtil {
static class LoggerHolder {
static final Logger LOGGER = Logger.getLogger(AsterixHyracksIntegrationUtil.class.getName());
@@ -63,29 +65,38 @@
public NodeControllerService[] ncs;
public IHyracksClientConnection hcc;
- private PropertiesAccessor propertiesAccessor;
+ private ConfigManager configManager;
+ private List<String> nodeNames;
public void init(boolean deleteOldInstanceData) throws Exception {
ncs = new NodeControllerService[0]; // ensure that ncs is not null
- final CCConfig ccConfig = createCCConfig();
- propertiesAccessor = PropertiesAccessor.getInstance(ccConfig.getAppConfig());
+ final ICCApplicationEntryPoint ccAppEntryPoint = createCCAppEntryPoint();
+ configManager = new ConfigManager();
+ ccAppEntryPoint.registerConfig(configManager);
+ final CCConfig ccConfig = createCCConfig(configManager);
+ cc = new ClusterControllerService(ccConfig, ccAppEntryPoint);
+
+ nodeNames = ccConfig.getConfigManager().getNodeNames();
if (deleteOldInstanceData) {
deleteTransactionLogs();
removeTestStorageFiles();
}
+ final List<NCConfig> ncConfigs = new ArrayList<>();
+ nodeNames.forEach(nodeId -> ncConfigs.add(createNCConfig(nodeId, configManager)));
+ final PropertiesAccessor accessor = PropertiesAccessor.getInstance(configManager.getAppConfig());
+ ncConfigs.forEach((ncConfig1) -> fixupIODevices(ncConfig1, accessor));
- cc = new ClusterControllerService(ccConfig);
cc.start();
// Starts ncs.
- List<String> nodes = propertiesAccessor.getNodeNames();
+ nodeNames = ccConfig.getConfigManager().getNodeNames();
List<NodeControllerService> nodeControllers = new ArrayList<>();
List<Thread> startupThreads = new ArrayList<>();
- for (String ncName : nodes) {
- NodeControllerService nodeControllerService = new NodeControllerService(
- fixupIODevices(createNCConfig(ncName)));
+ for (NCConfig ncConfig : ncConfigs) {
+ final INCApplicationEntryPoint ncAppEntryPoint = createNCAppEntryPoint();
+ NodeControllerService nodeControllerService = new NodeControllerService(ncConfig, ncAppEntryPoint);
nodeControllers.add(nodeControllerService);
- Thread ncStartThread = new Thread("IntegrationUtil-" + ncName) {
+ Thread ncStartThread = new Thread("IntegrationUtil-" + ncConfig.getNodeId()) {
@Override
public void run() {
try {
@@ -102,78 +113,83 @@
for (Thread thread : startupThreads) {
thread.join();
}
+ for (NCConfig ncConfig : ncConfigs) {
+ for (String ioDevice : ncConfig.getIODevices()) {
+ if (!new File(ioDevice).isAbsolute()) {
+ throw new IllegalStateException("iodevice not absolute: " + ioDevice);
+ }
+ }
+ }
// Wait until cluster becomes active
synchronized (ClusterStateManager.INSTANCE) {
while (ClusterStateManager.INSTANCE.getState() != ClusterState.ACTIVE) {
ClusterStateManager.INSTANCE.wait();
}
}
- hcc = new HyracksConnection(cc.getConfig().clientNetIpAddress, cc.getConfig().clientNetPort);
+ hcc = new HyracksConnection(cc.getConfig().getClientListenAddress(), cc.getConfig().getClientListenPort());
ncs = nodeControllers.toArray(new NodeControllerService[nodeControllers.size()]);
}
- protected CCConfig createCCConfig() throws IOException {
- CCConfig ccConfig = new CCConfig();
- ccConfig.clusterNetIpAddress = Inet4Address.getLoopbackAddress().getHostAddress();
- ccConfig.clientNetIpAddress = Inet4Address.getLoopbackAddress().getHostAddress();
- ccConfig.clientNetPort = DEFAULT_HYRACKS_CC_CLIENT_PORT;
- ccConfig.clusterNetPort = DEFAULT_HYRACKS_CC_CLUSTER_PORT;
- ccConfig.defaultMaxJobAttempts = 0;
- ccConfig.resultTTL = 120000;
- ccConfig.resultSweepThreshold = 1000;
- ccConfig.appCCMainClass = CCApplicationEntryPoint.class.getName();
+ protected CCConfig createCCConfig(ConfigManager configManager) throws IOException {
+ CCConfig ccConfig = new CCConfig(configManager);
+ ccConfig.setClusterListenAddress(Inet4Address.getLoopbackAddress().getHostAddress());
+ ccConfig.setClientListenAddress(Inet4Address.getLoopbackAddress().getHostAddress());
+ ccConfig.setClientListenPort(DEFAULT_HYRACKS_CC_CLIENT_PORT);
+ ccConfig.setClusterListenPort(DEFAULT_HYRACKS_CC_CLUSTER_PORT);
+ ccConfig.setResultTTL(120000L);
+ ccConfig.setResultSweepThreshold(1000L);
return ccConfig;
}
- protected NCConfig createNCConfig(String ncName) throws AsterixException, IOException {
- NCConfig ncConfig = new NCConfig();
- ncConfig.ccHost = "localhost";
- ncConfig.ccPort = DEFAULT_HYRACKS_CC_CLUSTER_PORT;
- ncConfig.clusterNetIPAddress = Inet4Address.getLoopbackAddress().getHostAddress();
- ncConfig.dataIPAddress = Inet4Address.getLoopbackAddress().getHostAddress();
- ncConfig.resultIPAddress = Inet4Address.getLoopbackAddress().getHostAddress();
- ncConfig.messagingIPAddress = Inet4Address.getLoopbackAddress().getHostAddress();
- ncConfig.nodeId = ncName;
- ncConfig.resultTTL = 120000;
- ncConfig.resultSweepThreshold = 1000;
- ncConfig.appArgs = Collections.singletonList("-virtual-NC");
- ncConfig.appNCMainClass = NCApplicationEntryPoint.class.getName();
+ protected ICCApplicationEntryPoint createCCAppEntryPoint() {
+ return new CCApplicationEntryPoint();
+ }
+
+ protected NCConfig createNCConfig(String ncName, ConfigManager configManager) {
+ NCConfig ncConfig = new NCConfig(ncName, configManager);
+ ncConfig.setClusterAddress("localhost");
+ ncConfig.setClusterPort(DEFAULT_HYRACKS_CC_CLUSTER_PORT);
+ ncConfig.setClusterListenAddress(Inet4Address.getLoopbackAddress().getHostAddress());
+ ncConfig.setDataListenAddress(Inet4Address.getLoopbackAddress().getHostAddress());
+ ncConfig.setResultListenAddress(Inet4Address.getLoopbackAddress().getHostAddress());
+ ncConfig.setMessagingListenAddress(Inet4Address.getLoopbackAddress().getHostAddress());
+ ncConfig.setResultTTL(120000L);
+ ncConfig.setResultSweepThreshold(1000L);
+ ncConfig.setVirtualNC(true);
return ncConfig;
}
- private NCConfig fixupIODevices(NCConfig ncConfig) throws AsterixException {
+ protected INCApplicationEntryPoint createNCAppEntryPoint() {
+ return new NCApplicationEntryPoint();
+ }
+
+ private NCConfig fixupIODevices(NCConfig ncConfig, PropertiesAccessor accessor) {
String tempPath = System.getProperty(IO_DIR_KEY);
if (tempPath.endsWith(File.separator)) {
tempPath = tempPath.substring(0, tempPath.length() - 1);
}
LOGGER.info("Using the temp path: " + tempPath);
// get initial partitions from properties
- String[] nodeStores = propertiesAccessor.getStores().get(ncConfig.nodeId);
+ String[] nodeStores = accessor.getStores().get(ncConfig.getNodeId());
if (nodeStores == null) {
- throw new AsterixException("Couldn't find stores for NC: " + ncConfig.nodeId);
+ throw new IllegalStateException("Couldn't find stores for NC: " + ncConfig.getNodeId());
}
String tempDirPath = System.getProperty(IO_DIR_KEY);
if (!tempDirPath.endsWith(File.separator)) {
tempDirPath += File.separator;
}
- for (int p = 0; p < nodeStores.length; p++) {
+ List<String> ioDevices = new ArrayList<>();
+ for (String nodeStore : nodeStores) {
// create IO devices based on stores
- String iodevicePath = tempDirPath + ncConfig.nodeId + File.separator + nodeStores[p];
+ String iodevicePath = tempDirPath + ncConfig.getNodeId() + File.separator + nodeStore;
File ioDeviceDir = new File(iodevicePath);
ioDeviceDir.mkdirs();
- if (p == 0) {
- ncConfig.ioDevices = iodevicePath;
- } else {
- ncConfig.ioDevices += "," + iodevicePath;
- }
+ ioDevices.add(iodevicePath);
}
+ configManager.set(ncConfig.getNodeId(), NCConfig.Option.IODEVICES, ioDevices.toArray(new String[0]));
return ncConfig;
}
- public String[] getNcNames() {
- return propertiesAccessor.getNodeNames().toArray(new String[propertiesAccessor.getNodeNames().size()]);
- }
-
public IHyracksClientConnection getHyracksClientConnection() {
return hcc;
}
@@ -222,15 +238,16 @@
public void removeTestStorageFiles() {
File dir = new File(System.getProperty(IO_DIR_KEY));
- for (String ncName : propertiesAccessor.getNodeNames()) {
+ for (String ncName : nodeNames) {
File ncDir = new File(dir, ncName);
FileUtils.deleteQuietly(ncDir);
}
}
- private void deleteTransactionLogs() throws IOException {
- for (String ncId : propertiesAccessor.getNodeNames()) {
- File log = new File(propertiesAccessor.getTransactionLogDirs().get(ncId));
+ private void deleteTransactionLogs() throws IOException, AsterixException {
+ for (String ncId : nodeNames) {
+ File log = new File(
+ PropertiesAccessor.getInstance(configManager.getAppConfig()).getTransactionLogDirs().get(ncId));
if (log.exists()) {
FileUtils.deleteDirectory(log);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
index 34086e7..eafe312 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
@@ -18,37 +18,37 @@
*/
package org.apache.asterix.api.http.server;
+import static org.apache.asterix.api.http.servlet.ServletConstants.ASTERIX_APP_CONTEXT_INFO_ATTR;
+
import java.io.IOException;
import java.io.PrintWriter;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
import java.util.concurrent.ConcurrentMap;
+import java.util.function.Predicate;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
-import org.apache.asterix.common.config.AbstractProperties;
-import org.apache.asterix.common.config.ReplicationProperties;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import io.netty.handler.codec.http.HttpHeaderNames;
+import io.netty.handler.codec.http.HttpResponseStatus;
import org.apache.asterix.common.utils.JSONUtil;
+import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.runtime.utils.ClusterStateManager;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.control.common.config.ConfigUtils;
+import org.apache.hyracks.control.common.controllers.ControllerConfig;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-
-import io.netty.handler.codec.http.HttpHeaderNames;
-import io.netty.handler.codec.http.HttpResponseStatus;
-
public class ClusterApiServlet extends AbstractServlet {
private static final Logger LOGGER = Logger.getLogger(ClusterApiServlet.class.getName());
private static final Pattern PARENT_DIR = Pattern.compile("/[^./]+/\\.\\./");
- private static final Pattern REPLICATION_PROPERTY = Pattern.compile("^replication\\.");
protected static final String NODE_ID_KEY = "node_id";
protected static final String CONFIG_URI_KEY = "configUri";
protected static final String STATS_URI_KEY = "statsUri";
@@ -57,10 +57,9 @@
protected static final String FULL_SHUTDOWN_URI_KEY = "fullShutdownUri";
protected static final String VERSION_URI_KEY = "versionUri";
protected static final String DIAGNOSTICS_URI_KEY = "diagnosticsUri";
- protected static final String REPLICATION_URI_KEY = "replicationUri";
private final ObjectMapper om = new ObjectMapper();
- public ClusterApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
+ public ClusterApiServlet(ConcurrentMap<String, Object> ctx, String... paths) {
super(ctx, paths);
}
@@ -75,9 +74,6 @@
case "":
json = getClusterStateJSON(request, "");
break;
- case "/replication":
- json = getReplicationJSON();
- break;
case "/summary":
json = getClusterStateSummaryJSON();
break;
@@ -99,35 +95,11 @@
return ClusterStateManager.INSTANCE.getClusterStateSummary();
}
- protected ObjectNode getReplicationJSON() {
- for (AbstractProperties props : getPropertiesInstances()) {
- if (props instanceof ReplicationProperties) {
- ObjectNode json = om.createObjectNode();
- json.putPOJO("config", props.getProperties(key -> REPLICATION_PROPERTY.matcher(key).replaceFirst("")));
- return json;
- }
- }
- throw new IllegalStateException("ERROR: replication properties not found");
- }
-
- protected Map<String, Object> getAllClusterProperties() {
- Map<String, Object> allProperties = new HashMap<>();
- for (AbstractProperties properties : getPropertiesInstances()) {
- if (!(properties instanceof ReplicationProperties)) {
- allProperties.putAll(properties.getProperties());
- }
- }
- return allProperties;
- }
-
- protected List<AbstractProperties> getPropertiesInstances() {
- return AbstractProperties.getImplementations();
- }
-
protected ObjectNode getClusterStateJSON(IServletRequest request, String pathToNode) {
ObjectNode json = ClusterStateManager.INSTANCE.getClusterStateDescription();
- Map<String, Object> allProperties = getAllClusterProperties();
- json.putPOJO("config", allProperties);
+ AppContextInfo appConfig = (AppContextInfo) ctx.get(ASTERIX_APP_CONTEXT_INFO_ATTR);
+ json.putPOJO("config", ConfigUtils.getSectionOptionsForJSON(appConfig.getCCApplicationContext().getAppConfig(),
+ Section.COMMON, getConfigSelector()));
ArrayNode ncs = (ArrayNode) json.get("ncs");
final StringBuilder requestURL = new StringBuilder("http://");
@@ -156,7 +128,6 @@
cc.put(CONFIG_URI_KEY, clusterURL + "cc/config");
cc.put(STATS_URI_KEY, clusterURL + "cc/stats");
cc.put(THREAD_DUMP_URI_KEY, clusterURL + "cc/threaddump");
- json.put(REPLICATION_URI_KEY, clusterURL + "replication");
json.put(SHUTDOWN_URI_KEY, adminURL + "shutdown");
json.put(FULL_SHUTDOWN_URI_KEY, adminURL + "shutdown?all=true");
json.put(VERSION_URI_KEY, adminURL + "version");
@@ -164,6 +135,11 @@
return json;
}
+ protected Predicate<IOption> getConfigSelector() {
+ return option -> option != ControllerConfig.Option.CONFIG_FILE
+ && option != ControllerConfig.Option.CONFIG_FILE_URL;
+ }
+
private String canonicalize(CharSequence requestURL) {
String clusterURL = "";
String newClusterURL = requestURL.toString();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterControllerDetailsApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterControllerDetailsApiServlet.java
index d680e6e..52d4d67 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterControllerDetailsApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterControllerDetailsApiServlet.java
@@ -41,7 +41,7 @@
private static final Logger LOGGER = Logger.getLogger(ClusterControllerDetailsApiServlet.class.getName());
private final ObjectMapper om = new ObjectMapper();
- public ClusterControllerDetailsApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
+ public ClusterControllerDetailsApiServlet(ConcurrentMap<String, Object> ctx, String... paths) {
super(ctx, paths);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
index ffe62b4..de227eb 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
@@ -50,7 +50,7 @@
public class DiagnosticsApiServlet extends NodeControllerDetailsApiServlet {
private static final Logger LOGGER = Logger.getLogger(DiagnosticsApiServlet.class.getName());
- public DiagnosticsApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
+ public DiagnosticsApiServlet(ConcurrentMap<String, Object> ctx, String... paths) {
super(ctx, paths);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NodeControllerDetailsApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NodeControllerDetailsApiServlet.java
index 07e70ab..d9757c7 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NodeControllerDetailsApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/NodeControllerDetailsApiServlet.java
@@ -48,7 +48,7 @@
private static final Logger LOGGER = Logger.getLogger(NodeControllerDetailsApiServlet.class.getName());
private final ObjectMapper om = new ObjectMapper();
- public NodeControllerDetailsApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
+ public NodeControllerDetailsApiServlet(ConcurrentMap<String, Object> ctx, String... paths) {
super(ctx, paths);
om.enable(SerializationFeature.INDENT_OUTPUT);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
index 91bebfe..a4cea39 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
@@ -18,7 +18,7 @@
*/
package org.apache.asterix.api.http.server;
-import static org.apache.asterix.api.http.servlet.ServletConstants.ASTERIX_BUILD_PROP_ATTR;
+import static org.apache.asterix.api.http.servlet.ServletConstants.ASTERIX_APP_CONTEXT_INFO_ATTR;
import java.io.IOException;
import java.io.PrintWriter;
@@ -27,18 +27,15 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import io.netty.handler.codec.http.HttpResponseStatus;
+import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-
-import io.netty.handler.codec.http.HttpMethod;
-import io.netty.handler.codec.http.HttpResponseStatus;
-
public class VersionApiServlet extends AbstractServlet {
private static final Logger LOGGER = Logger.getLogger(VersionApiServlet.class.getName());
@@ -49,7 +46,7 @@
@Override
protected void get(IServletRequest request, IServletResponse response) {
response.setStatus(HttpResponseStatus.OK);
- AppContextInfo props = (AppContextInfo) ctx.get(ASTERIX_BUILD_PROP_ATTR);
+ IPropertiesProvider props = (IPropertiesProvider) ctx.get(ASTERIX_APP_CONTEXT_INFO_ATTR);
Map<String, String> buildProperties = props.getBuildProperties().getAllProps();
ObjectMapper om = new ObjectMapper();
ObjectNode responseObject = om.createObjectNode();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ServletConstants.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ServletConstants.java
index d5f31ff..5b96cab 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ServletConstants.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ServletConstants.java
@@ -21,7 +21,7 @@
public class ServletConstants {
public static final String HYRACKS_CONNECTION_ATTR = "org.apache.asterix.HYRACKS_CONNECTION";
public static final String HYRACKS_DATASET_ATTR = "org.apache.asterix.HYRACKS_DATASET";
- public static final String ASTERIX_BUILD_PROP_ATTR = "org.apache.asterix.PROPS";
+ public static final String ASTERIX_APP_CONTEXT_INFO_ATTR = "org.apache.asterix.APP_CONTEXT_INFO";
public static final String EXECUTOR_SERVICE = "org.apache.asterix.EXECUTOR_SERVICE";
private ServletConstants() {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
index 625f18f..5eba31d 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
@@ -41,6 +41,7 @@
import org.apache.asterix.common.config.FeedProperties;
import org.apache.asterix.common.config.MessagingProperties;
import org.apache.asterix.common.config.MetadataProperties;
+import org.apache.asterix.common.config.NodeProperties;
import org.apache.asterix.common.config.PropertiesAccessor;
import org.apache.asterix.common.config.ReplicationProperties;
import org.apache.asterix.common.config.StorageProperties;
@@ -113,6 +114,7 @@
private BuildProperties buildProperties;
private ReplicationProperties replicationProperties;
private MessagingProperties messagingProperties;
+ private final NodeProperties nodeProperties;
private ThreadExecutor threadExecutor;
private IDatasetLifecycleManager datasetLifecycleManager;
private IFileMapManager fileMapManager;
@@ -150,6 +152,7 @@
buildProperties = new BuildProperties(propertiesAccessor);
replicationProperties = new ReplicationProperties(propertiesAccessor);
messagingProperties = new MessagingProperties(propertiesAccessor);
+ nodeProperties = new NodeProperties(propertiesAccessor);
libraryManager = new ExternalLibraryManager();
if (extensions != null) {
allExtensions.addAll(extensions);
@@ -220,7 +223,7 @@
//PersistentLocalResourceRepository to replicate metadata files and delete backups on drop index
localResourceRepository.setReplicationManager(replicationManager);
- /**
+ /*
* add the partitions that will be replicated in this node as inactive partitions
*/
//get nodes which replicate to this node
@@ -254,12 +257,12 @@
*/
ILifeCycleComponentManager lccm = ncApplicationContext.getLifeCycleComponentManager();
lccm.register((ILifeCycleComponent) bufferCache);
- /**
+ /*
* LogManager must be stopped after RecoveryManager, DatasetLifeCycleManager, and ReplicationManager
* to process any logs that might be generated during stopping these components
*/
lccm.register((ILifeCycleComponent) txnSubsystem.getLogManager());
- /**
+ /*
* ReplicationManager must be stopped after indexLifecycleManager and recovery manager
* so that any logs/files generated during closing datasets or checkpoints are sent to remote replicas
*/
@@ -267,7 +270,7 @@
lccm.register(replicationManager);
}
lccm.register((ILifeCycleComponent) txnSubsystem.getRecoveryManager());
- /**
+ /*
* Stopping indexLifecycleManager will flush and close all datasets.
*/
lccm.register((ILifeCycleComponent) datasetLifecycleManager);
@@ -376,6 +379,11 @@
}
@Override
+ public NodeProperties getNodeProperties() {
+ return nodeProperties;
+ }
+
+ @Override
public ILSMOperationTracker getLSMBTreeOperationTracker(int datasetID) {
return datasetLifecycleManager.getOperationTracker(datasetID);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
index 5104610..8d8a0f2 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
@@ -67,6 +67,7 @@
import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.util.FaultToleranceUtil;
import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
+import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public class AutoFaultToleranceStrategy implements IFaultToleranceStrategy {
@@ -113,9 +114,7 @@
}
private synchronized void notifyFailbackPlansNodeFailure(String nodeId) {
- Iterator<NodeFailbackPlan> iterator = planId2FailbackPlanMap.values().iterator();
- while (iterator.hasNext()) {
- NodeFailbackPlan plan = iterator.next();
+ for (NodeFailbackPlan plan : planId2FailbackPlanMap.values()) {
plan.notifyNodeFailure(nodeId);
}
}
@@ -173,7 +172,7 @@
try {
messageBroker.sendApplicationMessageToNC(takeoverRequest, replica);
} catch (Exception e) {
- /**
+ /*
* if we fail to send the request, it means the NC we tried to send the request to
* has failed. When the failure notification arrives, we will send any pending request
* that belongs to the failed NC to a different active replica.
@@ -186,7 +185,7 @@
private boolean addActiveReplica(String replica, ClusterPartition partition,
Map<String, List<Integer>> partitionRecoveryPlan) {
- Map<String, Map<String, String>> activeNcConfiguration = clusterManager.getActiveNcConfiguration();
+ Map<String, Map<IOption, Object>> activeNcConfiguration = clusterManager.getActiveNcConfiguration();
if (activeNcConfiguration.containsKey(replica) && !failedNodes.contains(replica)) {
if (!partitionRecoveryPlan.containsKey(replica)) {
List<Integer> replicaPartitions = new ArrayList<>();
@@ -213,7 +212,7 @@
ClusterPartition[] nodePartitions = clusterManager.getNodePartitions(replicaId);
for (ClusterPartition partition : nodePartitions) {
plan.addParticipant(partition.getActiveNodeId());
- /**
+ /*
* if the partition original node is the returning node,
* add it to the list of the partitions which will be failed back
*/
@@ -232,7 +231,7 @@
private synchronized void processPendingFailbackPlans() {
ClusterState state = clusterManager.getState();
- /**
+ /*
* if the cluster state is not ACTIVE, then failbacks should not be processed
* since some partitions are not active
*/
@@ -240,7 +239,7 @@
while (!pendingProcessingFailbackPlans.isEmpty()) {
//take the first pending failback plan
NodeFailbackPlan plan = pendingProcessingFailbackPlans.pop();
- /**
+ /*
* A plan at this stage will be in one of two states:
* 1. PREPARING -> the participants were selected but we haven't sent any request.
* 2. PENDING_ROLLBACK -> a participant failed before we send any requests
@@ -253,7 +252,7 @@
clusterManager.updateClusterPartition(partitionId, failbackNode, false);
}
- /**
+ /*
* if the returning node is the original metadata node,
* then metadata node will change after the failback completes
*/
@@ -268,7 +267,7 @@
//force new jobs to wait
clusterManager.setState(ClusterState.REBALANCING);
handleFailbackRequests(plan, messageBroker);
- /**
+ /*
* wait until the current plan is completed before processing the next plan.
* when the current one completes or is reverted, the cluster state will be
* ACTIVE again, and the next failback plan (if any) will be processed.
@@ -305,11 +304,11 @@
clusterPartitionsMap.put(partition.getPartitionId(), partition);
}
for (ClusterPartition partition : clusterPartitons) {
- if (partition.getActiveNodeId().equals(nodeId)) {
+ if (nodeId.equals(partition.getActiveNodeId())) {
nodePartitions.add(partition);
}
}
- /**
+ /*
* if there is any pending takeover request this node was supposed to handle,
* it needs to be sent to a different replica
*/
@@ -359,7 +358,7 @@
public synchronized void process(PreparePartitionsFailbackResponseMessage msg) {
NodeFailbackPlan plan = planId2FailbackPlanMap.get(msg.getPlanId());
plan.markRequestCompleted(msg.getRequestId());
- /**
+ /*
* A plan at this stage will be in one of three states:
* 1. PENDING_PARTICIPANT_REPONSE -> one or more responses are still expected (wait).
* 2. PENDING_COMPLETION -> all responses received (time to send completion request).
@@ -382,7 +381,7 @@
}
public synchronized void process(CompleteFailbackResponseMessage response) throws HyracksDataException {
- /**
+ /*
* the failback plan completed successfully:
* Remove all references to it.
* Remove the the failing back node from the failed nodes list.
@@ -409,8 +408,7 @@
// Since the metadata node will be changed, we need to rebind the proxy object
MetadataManager.INSTANCE.rebindMetadataNode();
} catch (Exception e) {
-
- /**
+ /*
* if we fail to send the request, it means the NC we tried to send the request to
* has failed. When the failure notification arrives, a new NC will be assigned to
* the metadata partition and a new metadata node takeover request will be sent to it.
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
index f3182cf..54f2c06 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
@@ -18,8 +18,33 @@
*/
package org.apache.asterix.hyracks.bootstrap;
+import static org.apache.asterix.api.http.servlet.ServletConstants.ASTERIX_APP_CONTEXT_INFO_ATTR;
+import static org.apache.asterix.api.http.servlet.ServletConstants.HYRACKS_CONNECTION_ATTR;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.ConcurrentMap;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
import org.apache.asterix.active.ActiveLifecycleListener;
-import org.apache.asterix.api.http.server.*;
+import org.apache.asterix.api.http.server.ApiServlet;
+import org.apache.asterix.api.http.server.ClusterApiServlet;
+import org.apache.asterix.api.http.server.ClusterControllerDetailsApiServlet;
+import org.apache.asterix.api.http.server.ConnectorApiServlet;
+import org.apache.asterix.api.http.server.DdlApiServlet;
+import org.apache.asterix.api.http.server.DiagnosticsApiServlet;
+import org.apache.asterix.api.http.server.FeedServlet;
+import org.apache.asterix.api.http.server.FullApiServlet;
+import org.apache.asterix.api.http.server.NodeControllerDetailsApiServlet;
+import org.apache.asterix.api.http.server.QueryApiServlet;
+import org.apache.asterix.api.http.server.QueryResultApiServlet;
+import org.apache.asterix.api.http.server.QueryServiceServlet;
+import org.apache.asterix.api.http.server.QueryStatusApiServlet;
+import org.apache.asterix.api.http.server.QueryWebInterfaceServlet;
+import org.apache.asterix.api.http.server.ShutdownApiServlet;
+import org.apache.asterix.api.http.server.UpdateApiServlet;
+import org.apache.asterix.api.http.server.VersionApiServlet;
import org.apache.asterix.api.http.servlet.ServletConstants;
import org.apache.asterix.app.cc.CCExtensionManager;
import org.apache.asterix.app.cc.ResourceIdManager;
@@ -27,6 +52,7 @@
import org.apache.asterix.app.replication.FaultToleranceStrategyFactory;
import org.apache.asterix.common.api.AsterixThreadFactory;
import org.apache.asterix.common.config.AsterixExtension;
+import org.apache.asterix.common.config.AsterixProperties;
import org.apache.asterix.common.config.ClusterProperties;
import org.apache.asterix.common.config.ExternalProperties;
import org.apache.asterix.common.config.MetadataProperties;
@@ -47,9 +73,9 @@
import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.translator.IStatementExecutorFactory;
import org.apache.hyracks.api.application.ICCApplicationContext;
-import org.apache.hyracks.api.application.ICCApplicationEntryPoint;
import org.apache.hyracks.api.client.HyracksConnection;
import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.config.IConfigManager;
import org.apache.hyracks.api.job.resource.IJobCapacityController;
import org.apache.hyracks.api.lifecycle.LifeCycleComponentManager;
import org.apache.hyracks.control.cc.ClusterControllerService;
@@ -57,15 +83,9 @@
import org.apache.hyracks.http.api.IServlet;
import org.apache.hyracks.http.server.HttpServer;
import org.apache.hyracks.http.server.WebManager;
+import org.apache.hyracks.util.file.FileUtil;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import static org.apache.asterix.api.http.servlet.ServletConstants.ASTERIX_BUILD_PROP_ATTR;
-import static org.apache.asterix.api.http.servlet.ServletConstants.HYRACKS_CONNECTION_ATTR;
-
-public class CCApplicationEntryPoint implements ICCApplicationEntryPoint {
+public class CCApplicationEntryPoint extends org.apache.hyracks.control.cc.CCApplicationEntryPoint {
private static final Logger LOGGER = Logger.getLogger(CCApplicationEntryPoint.class.getName());
private static IAsterixStateProxy proxy;
@@ -75,8 +95,15 @@
private IJobCapacityController jobCapacityController;
protected WebManager webManager;
+ public CCApplicationEntryPoint() {
+ CCConfig.defaultDir = FileUtil.joinPath(System.getProperty("java.io.tmpdir"), "asterixdb");
+ }
+
@Override
public void start(ICCApplicationContext ccAppCtx, String[] args) throws Exception {
+ if (args.length > 0) {
+ throw new IllegalArgumentException("Unrecognized argument(s): " + Arrays.toString(args));
+ }
final ClusterControllerService controllerService = (ClusterControllerService) ccAppCtx.getControllerService();
ICCMessageBroker messageBroker = new CCMessageBroker(controllerService);
this.appCtx = ccAppCtx;
@@ -100,7 +127,7 @@
AppContextInfo.INSTANCE.setExtensionManager(ccExtensionManager);
final CCConfig ccConfig = controllerService.getCCConfig();
if (System.getProperty("java.rmi.server.hostname") == null) {
- System.setProperty("java.rmi.server.hostname", ccConfig.clusterNetIpAddress);
+ System.setProperty("java.rmi.server.hostname", ccConfig.getClusterListenAddress());
}
MetadataProperties metadataProperties = AppContextInfo.INSTANCE.getMetadataProperties();
@@ -165,7 +192,7 @@
new HttpServer(webManager.getBosses(), webManager.getWorkers(), externalProperties.getAPIServerPort());
IHyracksClientConnection hcc = getNewHyracksClientConnection();
jsonAPIServer.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
- jsonAPIServer.setAttribute(ASTERIX_BUILD_PROP_ATTR, AppContextInfo.INSTANCE);
+ jsonAPIServer.setAttribute(ASTERIX_APP_CONTEXT_INFO_ATTR, AppContextInfo.INSTANCE);
jsonAPIServer.setAttribute(ServletConstants.EXECUTOR_SERVICE,
((ClusterControllerService) appCtx.getControllerService()).getExecutor());
@@ -196,7 +223,7 @@
}
protected void addServlet(HttpServer server, String path) {
- server.addServlet(createServlet(server, path, path));
+ server.addServlet(createServlet(server.ctx(), path, path));
}
protected HttpServer setupQueryWebServer(ExternalProperties externalProperties) throws Exception {
@@ -216,53 +243,53 @@
return feedServer;
}
- protected IServlet createServlet(HttpServer server, String key, String... paths) {
+ protected IServlet createServlet(ConcurrentMap<String, Object> ctx, String key, String... paths) {
switch (key) {
case Servlets.AQL:
- return new FullApiServlet(server.ctx(), paths, ccExtensionManager.getAqlCompilationProvider(),
+ return new FullApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.AQL_QUERY:
- return new QueryApiServlet(server.ctx(), paths, ccExtensionManager.getAqlCompilationProvider(),
+ return new QueryApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.AQL_UPDATE:
- return new UpdateApiServlet(server.ctx(), paths, ccExtensionManager.getAqlCompilationProvider(),
+ return new UpdateApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.AQL_DDL:
- return new DdlApiServlet(server.ctx(), paths, ccExtensionManager.getAqlCompilationProvider(),
+ return new DdlApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.SQLPP:
- return new FullApiServlet(server.ctx(), paths, ccExtensionManager.getSqlppCompilationProvider(),
+ return new FullApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.SQLPP_QUERY:
- return new QueryApiServlet(server.ctx(), paths, ccExtensionManager.getSqlppCompilationProvider(),
+ return new QueryApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.SQLPP_UPDATE:
- return new UpdateApiServlet(server.ctx(), paths, ccExtensionManager.getSqlppCompilationProvider(),
+ return new UpdateApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.SQLPP_DDL:
- return new DdlApiServlet(server.ctx(), paths, ccExtensionManager.getSqlppCompilationProvider(),
+ return new DdlApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.QUERY_STATUS:
- return new QueryStatusApiServlet(server.ctx(), paths);
+ return new QueryStatusApiServlet(ctx, paths);
case Servlets.QUERY_RESULT:
- return new QueryResultApiServlet(server.ctx(), paths);
+ return new QueryResultApiServlet(ctx, paths);
case Servlets.QUERY_SERVICE:
- return new QueryServiceServlet(server.ctx(), paths, ccExtensionManager.getSqlppCompilationProvider(),
+ return new QueryServiceServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.CONNECTOR:
- return new ConnectorApiServlet(server.ctx(), paths);
+ return new ConnectorApiServlet(ctx, paths);
case Servlets.SHUTDOWN:
- return new ShutdownApiServlet(server.ctx(), paths);
+ return new ShutdownApiServlet(ctx, paths);
case Servlets.VERSION:
- return new VersionApiServlet(server.ctx(), paths);
+ return new VersionApiServlet(ctx, paths);
case Servlets.CLUSTER_STATE:
- return new ClusterApiServlet(server.ctx(), paths);
+ return new ClusterApiServlet(ctx, paths);
case Servlets.CLUSTER_STATE_NODE_DETAIL:
- return new NodeControllerDetailsApiServlet(server.ctx(), paths);
+ return new NodeControllerDetailsApiServlet(ctx, paths);
case Servlets.CLUSTER_STATE_CC_DETAIL:
- return new ClusterControllerDetailsApiServlet(server.ctx(), paths);
+ return new ClusterControllerDetailsApiServlet(ctx, paths);
case Servlets.DIAGNOSTICS:
- return new DiagnosticsApiServlet(server.ctx(), paths);
+ return new DiagnosticsApiServlet(ctx, paths);
default:
throw new IllegalStateException(String.valueOf(key));
}
@@ -283,7 +310,13 @@
return jobCapacityController;
}
+ @Override
+ public void registerConfig(IConfigManager configManager) {
+ super.registerConfig(configManager);
+ AsterixProperties.registerConfigOptions(configManager);
+ }
+
public static synchronized void setAsterixStateProxy(IAsterixStateProxy proxy) {
- CCApplicationEntryPoint.proxy = proxy;
+ org.apache.asterix.hyracks.bootstrap.CCApplicationEntryPoint.proxy = proxy;
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
index 53b577a..8883504 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
@@ -43,6 +43,7 @@
import org.apache.asterix.metadata.cluster.RemoveNodeWorkResponse;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.hyracks.api.application.IClusterLifecycleListener;
+import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.exceptions.HyracksException;
public class ClusterLifecycleListener implements IClusterLifecycleListener {
@@ -66,7 +67,7 @@
}
@Override
- public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) throws HyracksException {
+ public void notifyNodeJoin(String nodeId, Map<IOption, Object> ncConfiguration) throws HyracksException {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("NC: " + nodeId + " joined");
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
index 7f649bc..238e93c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
@@ -18,15 +18,24 @@
*/
package org.apache.asterix.hyracks.bootstrap;
+import java.io.File;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
import org.apache.asterix.app.nc.NCAppRuntimeContext;
import org.apache.asterix.app.replication.message.StartupTaskRequestMessage;
import org.apache.asterix.common.api.AsterixThreadFactory;
import org.apache.asterix.common.api.IAppRuntimeContext;
import org.apache.asterix.common.config.AsterixExtension;
+import org.apache.asterix.common.config.AsterixProperties;
import org.apache.asterix.common.config.ClusterProperties;
import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.config.MessagingProperties;
import org.apache.asterix.common.config.MetadataProperties;
+import org.apache.asterix.common.config.NodeProperties;
import org.apache.asterix.common.config.StorageProperties;
import org.apache.asterix.common.config.TransactionProperties;
import org.apache.asterix.common.transactions.IRecoveryManager;
@@ -40,53 +49,42 @@
import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepository;
import org.apache.commons.io.FileUtils;
import org.apache.hyracks.api.application.INCApplicationContext;
-import org.apache.hyracks.api.application.INCApplicationEntryPoint;
+import org.apache.hyracks.api.config.IConfigManager;
import org.apache.hyracks.api.job.resource.NodeCapacity;
import org.apache.hyracks.api.messages.IMessageBroker;
+import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.hyracks.control.nc.NodeControllerService;
-import org.kohsuke.args4j.CmdLineException;
-import org.kohsuke.args4j.CmdLineParser;
-import org.kohsuke.args4j.Option;
+import org.apache.hyracks.util.file.FileUtil;
-import java.io.File;
-import java.util.Collections;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
+public class NCApplicationEntryPoint extends org.apache.hyracks.control.nc.NCApplicationEntryPoint {
private static final Logger LOGGER = Logger.getLogger(NCApplicationEntryPoint.class.getName());
- @Option(name = "-initial-run", usage = "A flag indicating if it's the first time the NC is started "
- + "(default: false)", required = false)
- public boolean initialRun = false;
-
- @Option(name = "-virtual-NC", usage = "A flag indicating if this NC is running on virtual cluster "
- + "(default: false)", required = false)
- public boolean virtualNC = false;
-
- private INCApplicationContext ncApplicationContext = null;
+ private INCApplicationContext ncAppCtx;
private IAppRuntimeContext runtimeContext;
private String nodeId;
private boolean stopInitiated = false;
private SystemState systemState;
- private IMessageBroker messageBroker;
+
+ public NCApplicationEntryPoint() {
+ NCConfig.defaultDir = FileUtil.joinPath(System.getProperty("java.io.tmpdir"), "asterixdb");
+ NCConfig.defaultAppClass = "org.apache.asterix.hyracks.bootstrap.NCApplicationEntryPoint";
+ }
+
+ @Override
+ public void registerConfigOptions(IConfigManager configManager) {
+ super.registerConfigOptions(configManager);
+ AsterixProperties.registerConfigOptions(configManager);
+ }
@Override
public void start(INCApplicationContext ncAppCtx, String[] args) throws Exception {
- CmdLineParser parser = new CmdLineParser(this);
- try {
- parser.parseArgument(args);
- } catch (CmdLineException e) {
- LOGGER.severe(e.getMessage());
- LOGGER.severe("Usage:");
- parser.printUsage(System.err);
- throw e;
+ if (args.length > 0) {
+ throw new IllegalArgumentException("Unrecognized argument(s): " + Arrays.toString(args));
}
- ncAppCtx.setThreadFactory(
- new AsterixThreadFactory(ncAppCtx.getThreadFactory(), ncAppCtx.getLifeCycleComponentManager()));
- ncApplicationContext = ncAppCtx;
- nodeId = ncApplicationContext.getNodeId();
+ ncAppCtx.setThreadFactory(new AsterixThreadFactory(ncAppCtx.getThreadFactory(),
+ ncAppCtx.getLifeCycleComponentManager()));
+ this.ncAppCtx = ncAppCtx;
+ nodeId = this.ncAppCtx.getNodeId();
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Starting Asterix node controller: " + nodeId);
}
@@ -94,25 +92,25 @@
final NodeControllerService controllerService = (NodeControllerService) ncAppCtx.getControllerService();
if (System.getProperty("java.rmi.server.hostname") == null) {
- System.setProperty("java.rmi.server.hostname",
- (controllerService).getConfiguration().clusterNetPublicIPAddress);
+ System.setProperty("java.rmi.server.hostname", (controllerService)
+ .getConfiguration().getClusterPublicAddress());
}
- runtimeContext = new NCAppRuntimeContext(ncApplicationContext, getExtensions());
- MetadataProperties metadataProperties = ((IPropertiesProvider) runtimeContext).getMetadataProperties();
- if (!metadataProperties.getNodeNames().contains(ncApplicationContext.getNodeId())) {
+ runtimeContext = new NCAppRuntimeContext(this.ncAppCtx, getExtensions());
+ MetadataProperties metadataProperties = runtimeContext.getMetadataProperties();
+ if (!metadataProperties.getNodeNames().contains(this.ncAppCtx.getNodeId())) {
if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Substitute node joining : " + ncApplicationContext.getNodeId());
+ LOGGER.info("Substitute node joining : " + this.ncAppCtx.getNodeId());
}
updateOnNodeJoin();
}
- runtimeContext.initialize(initialRun);
- ncApplicationContext.setApplicationObject(runtimeContext);
- MessagingProperties messagingProperties = ((IPropertiesProvider) runtimeContext).getMessagingProperties();
- messageBroker = new NCMessageBroker(controllerService, messagingProperties);
- ncApplicationContext.setMessageBroker(messageBroker);
+ runtimeContext.initialize(runtimeContext.getNodeProperties().isInitialRun());
+ this.ncAppCtx.setApplicationObject(runtimeContext);
+ MessagingProperties messagingProperties = runtimeContext.getMessagingProperties();
+ IMessageBroker messageBroker = new NCMessageBroker(controllerService, messagingProperties);
+ this.ncAppCtx.setMessageBroker(messageBroker);
MessagingChannelInterfaceFactory interfaceFactory = new MessagingChannelInterfaceFactory(
(NCMessageBroker) messageBroker, messagingProperties);
- ncApplicationContext.setMessagingChannelInterfaceFactory(interfaceFactory);
+ this.ncAppCtx.setMessagingChannelInterfaceFactory(interfaceFactory);
IRecoveryManager recoveryMgr = runtimeContext.getTransactionSubsystem().getRecoveryManager();
systemState = recoveryMgr.getSystemState();
@@ -148,7 +146,7 @@
performLocalCleanUp();
//Note: stopping recovery manager will make a sharp checkpoint
- ncApplicationContext.getLifeCycleComponentManager().stopAll(false);
+ ncAppCtx.getLifeCycleComponentManager().stopAll(false);
runtimeContext.deinitialize();
} else {
if (LOGGER.isLoggable(Level.INFO)) {
@@ -160,17 +158,18 @@
@Override
public void notifyStartupComplete() throws Exception {
// Since we don't pass initial run flag in AsterixHyracksIntegrationUtil, we use the virtualNC flag
- if (systemState == SystemState.PERMANENT_DATA_LOSS && (initialRun || virtualNC)) {
+ final NodeProperties nodeProperties = runtimeContext.getNodeProperties();
+ if (systemState == SystemState.PERMANENT_DATA_LOSS && (nodeProperties.isInitialRun() || nodeProperties.isVirtualNc())) {
systemState = SystemState.BOOTSTRAPPING;
}
// Request startup tasks from CC
- StartupTaskRequestMessage.send((NodeControllerService) ncApplicationContext.getControllerService(),
+ StartupTaskRequestMessage.send((NodeControllerService) ncAppCtx.getControllerService(),
systemState);
}
@Override
public NodeCapacity getCapacity() {
- IPropertiesProvider propertiesProvider = (IPropertiesProvider) runtimeContext;
+ IPropertiesProvider propertiesProvider = runtimeContext;
StorageProperties storageProperties = propertiesProvider.getStorageProperties();
// Deducts the reserved buffer cache size and memory component size from the maximum heap size,
// and deducts one core for processing heartbeats.
@@ -201,15 +200,16 @@
}
private void updateOnNodeJoin() {
- MetadataProperties metadataProperties = ((IPropertiesProvider) runtimeContext).getMetadataProperties();
+ MetadataProperties metadataProperties = runtimeContext.getMetadataProperties();
if (!metadataProperties.getNodeNames().contains(nodeId)) {
- metadataProperties.getNodeNames().add(nodeId);
Cluster cluster = ClusterProperties.INSTANCE.getCluster();
if (cluster == null) {
throw new IllegalStateException("No cluster configuration found for this instance");
}
+ NCConfig ncConfig = ((NodeControllerService) ncAppCtx.getControllerService()).getConfiguration();
+ ncConfig.getConfigManager().registerVirtualNode(nodeId);
String asterixInstanceName = metadataProperties.getInstanceName();
- TransactionProperties txnProperties = ((IPropertiesProvider) runtimeContext).getTransactionProperties();
+ TransactionProperties txnProperties = runtimeContext.getTransactionProperties();
Node self = null;
List<Node> nodes;
if (cluster.getSubstituteNodes() != null) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/FaultToleranceUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/FaultToleranceUtil.java
index 0ab4e54..241cd65 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/FaultToleranceUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/util/FaultToleranceUtil.java
@@ -31,11 +31,12 @@
import org.apache.asterix.runtime.message.ReplicaEventMessage;
import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.control.common.controllers.NCConfig;
public class FaultToleranceUtil {
private static final Logger LOGGER = Logger.getLogger(FaultToleranceUtil.class.getName());
- private static final String CLUSTER_NET_IP_ADDRESS_KEY = "cluster-net-ip-address";
private FaultToleranceUtil() {
throw new AssertionError();
@@ -47,10 +48,10 @@
List<String> primaryRemoteReplicas = replicationStrategy.getRemotePrimaryReplicas(nodeId).stream()
.map(Replica::getId).collect(Collectors.toList());
String nodeIdAddress = StringUtils.EMPTY;
- Map<String, Map<String, String>> activeNcConfiguration = clusterManager.getActiveNcConfiguration();
+ Map<String, Map<IOption, Object>> activeNcConfiguration = clusterManager.getActiveNcConfiguration();
// In case the node joined with a new IP address, we need to send it to the other replicas
if (event == ClusterEventType.NODE_JOIN) {
- nodeIdAddress = activeNcConfiguration.get(nodeId).get(CLUSTER_NET_IP_ADDRESS_KEY);
+ nodeIdAddress = (String)activeNcConfiguration.get(nodeId).get(NCConfig.Option.CLUSTER_PUBLIC_ADDRESS);
}
ReplicaEventMessage msg = new ReplicaEventMessage(nodeId, nodeIdAddress, event);
for (String replica : primaryRemoteReplicas) {
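The active NC configuration exposed by the cluster state manager is now keyed by typed IOption constants rather than raw ini strings, so consumers look values up with the option constant and cast to its value type. A minimal sketch of that lookup, using a hypothetical helper class that is not part of this change:

import java.util.Map;

import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.control.common.controllers.NCConfig;

// Hypothetical helper, shown only to illustrate the IOption-keyed lookup.
public final class NcConfigLookup {

    private NcConfigLookup() {
    }

    // Returns the public cluster address advertised by the given NC, or null
    // if the node is not present in the active configuration map.
    public static String clusterPublicAddress(Map<String, Map<IOption, Object>> activeNcConfiguration,
            String nodeId) {
        Map<IOption, Object> nodeConfig = activeNcConfiguration.get(nodeId);
        return nodeConfig == null ? null : (String) nodeConfig.get(NCConfig.Option.CLUSTER_PUBLIC_ADDRESS);
    }
}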
diff --git a/asterixdb/asterix-app/src/main/resources/asterix-build-configuration.xml b/asterixdb/asterix-app/src/main/resources/asterix-build-configuration.xml
index 7390d57..42cc42c 100644
--- a/asterixdb/asterix-app/src/main/resources/asterix-build-configuration.xml
+++ b/asterixdb/asterix-app/src/main/resources/asterix-build-configuration.xml
@@ -110,4 +110,9 @@
<description>Number of reusable frames for NC to NC messaging. (Default = 512)
</description>
</property>
+ <property>
+ <name>log.level</name>
+ <value>INFO</value>
+ <description>The logging level for the instance</description>
+ </property>
</asterixConfiguration>
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
index 7fed010..52ac855 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
@@ -19,7 +19,7 @@
package org.apache.asterix.api.http.servlet;
-import static org.apache.asterix.api.http.servlet.ServletConstants.ASTERIX_BUILD_PROP_ATTR;
+import static org.apache.asterix.api.http.servlet.ServletConstants.ASTERIX_APP_CONTEXT_INFO_ATTR;
import static org.apache.asterix.api.http.servlet.ServletConstants.HYRACKS_CONNECTION_ATTR;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -69,7 +69,7 @@
// Put stuff in the servlet context map
servlet.ctx().put(HYRACKS_CONNECTION_ATTR, mockHcc);
- servlet.ctx().put(ASTERIX_BUILD_PROP_ATTR, mockCtx);
+ servlet.ctx().put(ASTERIX_APP_CONTEXT_INFO_ATTR, mockCtx);
// Sets up mock returns.
when(mockResponse.writer()).thenReturn(outputWriter);
when(mockRequest.getHttpRequest()).thenReturn(mockHttpRequest);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
index cc12f36..2061cda 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
@@ -58,7 +58,6 @@
import org.apache.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
import org.apache.asterix.transaction.management.runtime.CommitRuntime;
import org.apache.asterix.transaction.management.service.logging.LogReader;
-import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.runtime.base.IPushRuntime;
import org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory;
@@ -93,6 +92,7 @@
import org.apache.hyracks.storage.common.file.ILocalResourceFactoryProvider;
import org.apache.hyracks.storage.common.file.LocalResource;
import org.apache.hyracks.test.support.TestUtils;
+import org.apache.hyracks.util.file.FileUtil;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
@@ -101,8 +101,7 @@
protected static final Logger LOGGER = Logger.getLogger(TestNodeController.class.getName());
protected static final String PATH_ACTUAL = "unittest" + File.separator;
- protected static final String PATH_BASE =
- StringUtils.join(new String[] { "src", "test", "resources", "nodetests" }, File.separator);
+ protected static final String PATH_BASE = FileUtil.joinPath("src", "test", "resources", "nodetests");
protected static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
protected static TransactionProperties txnProperties;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/config/ConfigUsageTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/config/ConfigUsageTest.java
new file mode 100644
index 0000000..b96b7fe
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/config/ConfigUsageTest.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.common.config;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+
+import org.apache.asterix.hyracks.bootstrap.CCApplicationEntryPoint;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.control.common.config.ConfigManager;
+import org.apache.hyracks.util.file.FileUtil;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class ConfigUsageTest {
+
+ private static final String CSV_FILE = FileUtil.joinPath("target", "surefire-reports", "config-options.csv");
+
+ @Test
+ public void generateUsage() {
+ generateUsage("| ", " | ", " |", true, System.err);
+ }
+
+ @Test
+ public void generateUsageCSV() throws IOException {
+ new File(CSV_FILE).getParentFile().mkdirs();
+ try (final PrintStream output = new PrintStream(new FileOutputStream(CSV_FILE))) {
+ generateUsage("\"", "\",\"", "\"", false, output);
+ // TODO(mblow): add some validation (in addition to just ensuring no exceptions...)
+ }
+ }
+
+ public void generateUsage(String startDelim, String midDelim, String endDelim, boolean align, PrintStream output) {
+ ConfigManager configManager = new ConfigManager();
+ CCApplicationEntryPoint aep = new CCApplicationEntryPoint();
+ aep.registerConfig(configManager);
+ StringBuilder buf = new StringBuilder();
+ int maxSectionWidth = 0;
+ int maxNameWidth = 0;
+ int maxDescriptionWidth = 0;
+ int maxDefaultWidth = 0;
+ if (align) {
+ for (Section section : configManager.getSections()) {
+ maxSectionWidth = Math.max(maxSectionWidth, section.sectionName().length());
+ for (IOption option : configManager.getOptions(section)) {
+ if (option.hidden()) {
+ continue;
+ }
+ maxNameWidth = Math.max(maxNameWidth, option.ini().length());
+ maxDescriptionWidth = Math.max(maxDescriptionWidth,
+ option.description() == null ? 0 : option.description().length());
+ maxDefaultWidth = Math.max(maxDefaultWidth, configManager.defaultTextForUsage(option, IOption::ini)
+ .length());
+ }
+ }
+ }
+ maxDescriptionWidth = Math.min(80, maxDescriptionWidth);
+ for (Section section : configManager.getSections()) {
+ List<IOption> options = new ArrayList<>(configManager.getOptions(section));
+ options.sort(Comparator.comparing(IOption::ini));
+ for (IOption option : options) {
+ if (option.hidden()) {
+ continue;
+ }
+ buf.append(startDelim);
+ center(buf, section.sectionName(), maxSectionWidth).append(midDelim);
+ pad(buf, option.ini(), maxNameWidth).append(midDelim);
+ String description = option.description() == null ? "" : option.description();
+ String defaultText = configManager.defaultTextForUsage(option, IOption::ini);
+ boolean extra = false;
+ while (align && description.length() > maxDescriptionWidth) {
+ int cut = description.lastIndexOf(' ', maxDescriptionWidth);
+ pad(buf, description.substring(0, cut), maxDescriptionWidth).append(midDelim);
+ pad(buf, defaultText, maxDefaultWidth).append(endDelim).append('\n');
+ defaultText = "";
+ description = description.substring(cut + 1);
+ buf.append(startDelim);
+ pad(buf, "", maxSectionWidth).append(midDelim);
+ pad(buf, "", maxNameWidth).append(midDelim);
+ extra = true;
+ }
+ pad(buf, description, maxDescriptionWidth).append(midDelim);
+ pad(buf, defaultText, maxDefaultWidth).append(endDelim).append('\n');
+ if (extra) {
+ buf.append(startDelim);
+ pad(buf, "", maxSectionWidth).append(midDelim);
+ pad(buf, "", maxNameWidth).append(midDelim);
+ pad(buf, "", maxDescriptionWidth).append(midDelim);
+ pad(buf, "", maxDefaultWidth).append(endDelim).append('\n');
+ }
+ }
+ }
+ output.println(buf);
+ }
+
+ private StringBuilder center(StringBuilder buf, String string, int width) {
+ if (string == null) {
+ string = "";
+ }
+ int pad = width - string.length();
+ int leftPad = pad / 2;
+ for (int i = leftPad; i > 0; i--) {
+ buf.append(' ');
+ }
+ buf.append(string);
+ for (int i = pad - leftPad; i > 0; i--) {
+ buf.append(' ');
+ }
+ return buf;
+ }
+
+ private StringBuilder pad(StringBuilder buf, String string, int width) {
+ if (string == null) {
+ string = "";
+ }
+ buf.append(string);
+ for (int i = width - string.length(); i > 0; i--) {
+ buf.append(' ');
+ }
+ return buf;
+ }
+
+}
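Since generateUsage is parameterized on delimiters, alignment, and the output stream, the same routine drives both the aligned table written to System.err and the CSV written under target/surefire-reports. A small sketch of invoking it against an in-memory stream (the demo class name is illustrative):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;

import org.apache.asterix.common.config.ConfigUsageTest;

public class ConfigUsageDemo {

    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (PrintStream out = new PrintStream(bytes, true, StandardCharsets.UTF_8.name())) {
            // Aligned, pipe-delimited rendering of every non-hidden option.
            new ConfigUsageTest().generateUsage("| ", " | ", " |", true, out);
        }
        System.out.println(bytes.toString(StandardCharsets.UTF_8.name()));
    }
}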
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
index ae40827..7765572 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
@@ -1113,13 +1113,13 @@
private void deleteNCTxnLogs(String nodeId, CompilationUnit cUnit) throws Exception {
OutputFormat fmt = OutputFormat.forCompilationUnit(cUnit);
- String endpoint = "/admin/cluster";
+ String endpoint = "/admin/cluster/node/" + nodeId + "/config";
InputStream executeJSONGet = executeJSONGet(fmt, new URI("http://" + host + ":" + port + endpoint));
StringWriter actual = new StringWriter();
IOUtils.copy(executeJSONGet, actual, StandardCharsets.UTF_8);
String config = actual.toString();
ObjectMapper om = new ObjectMapper();
- String logDir = om.readTree(config).findPath("transaction.log.dirs").get(nodeId).asText();
+ String logDir = om.readTree(config).findPath("txn.log.dir").asText();
FileUtils.deleteQuietly(new File(logDir));
}
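The per-node config endpoint returns that node's settings keyed by ini parameter names, so the test now reads txn.log.dir directly instead of looking the node up in a cluster-wide map. A standalone sketch of that request/parse sequence, assuming a local instance on the default API port and the node id asterix_nc1:

import java.io.InputStream;
import java.net.URL;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class NodeConfigProbe {

    public static void main(String[] args) throws Exception {
        // Per-node configuration, keyed by ini parameter names (e.g. txn.log.dir).
        URL url = new URL("http://localhost:19002/admin/cluster/node/asterix_nc1/config");
        ObjectMapper om = new ObjectMapper();
        try (InputStream in = url.openStream()) {
            JsonNode config = om.readTree(in);
            System.out.println("txn.log.dir = " + config.path("txn.log.dir").asText());
        }
    }
}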
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestHelper.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestHelper.java
index c1399fb..df9782a 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestHelper.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestHelper.java
@@ -38,7 +38,7 @@
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.hyracks.util.file.FileUtil;
public final class TestHelper {
@@ -54,10 +54,6 @@
return false;
}
- public static String joinPath(String... pathElements) {
- return StringUtils.join(pathElements, File.separatorChar);
- }
-
public static void unzip(String sourceFile, String outputDir) throws IOException {
if (System.getProperty("os.name").toLowerCase().startsWith("win")) {
try (ZipFile zipFile = new ZipFile(sourceFile)) {
@@ -117,7 +113,7 @@
public static void deleteExistingInstanceFiles() {
for (String dirName : TEST_DIRS) {
- File f = new File(joinPath(TEST_DIR_BASE_PATH, dirName));
+ File f = new File(FileUtil.joinPath(TEST_DIR_BASE_PATH, dirName));
if (FileUtils.deleteQuietly(f)) {
System.out.println("Dir " + f.getName() + " deleted");
}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/logging/CheckpointingTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/logging/CheckpointingTest.java
index 10e8658..34bb9cf 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/logging/CheckpointingTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/logging/CheckpointingTest.java
@@ -82,7 +82,7 @@
private static final String DATASET_NAME = "TestDS";
private static final String DATA_TYPE_NAME = "DUMMY";
private static final String NODE_GROUP_NAME = "DEFAULT";
- private static final int TXN_LOG_PARTITION_SIZE = StorageUtil.getSizeInBytes(2, StorageUnit.MEGABYTE);
+ private static final int TXN_LOG_PARTITION_SIZE = StorageUtil.getIntSizeInBytes(2, StorageUnit.MEGABYTE);
@Before
public void setUp() throws Exception {
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ClusterStateDefaultParameterTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ClusterStateDefaultParameterTest.java
index 545b2a1..86a9639 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ClusterStateDefaultParameterTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ClusterStateDefaultParameterTest.java
@@ -65,7 +65,7 @@
@Test
public void test() throws Exception {
StringBuilder result = new StringBuilder();
- URL url = new URL("http://localhost:19002/admin/cluster");
+ URL url = new URL("http://localhost:19002/admin/cluster/node/asterix_nc1/config");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
BufferedReader rd = new BufferedReader(new InputStreamReader(conn.getInputStream()));
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/OptimizerParserTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/OptimizerParserTest.java
index 31103a8..486a219 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/OptimizerParserTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/OptimizerParserTest.java
@@ -24,7 +24,7 @@
import java.util.logging.Logger;
import org.apache.asterix.test.base.AsterixTestHelper;
-import org.apache.asterix.test.common.TestHelper;
+import org.apache.hyracks.util.file.FileUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -41,10 +41,10 @@
private static final String EXTENSION_RESULT = "ast";
private static final String FILENAME_IGNORE = "ignore.txt";
private static final String FILENAME_ONLY = "only.txt";
- private static final String PATH_BASE = TestHelper.joinPath("src", "test", "resources", "optimizerts");
- private static final String PATH_QUERIES = TestHelper.joinPath(PATH_BASE, "queries_sqlpp");
- private static final String PATH_EXPECTED = TestHelper.joinPath(PATH_BASE, "results_parser_sqlpp");
- private static final String PATH_ACTUAL = TestHelper.joinPath("target", "opt_parserts", "results_parser_sqlpp");
+ private static final String PATH_BASE = FileUtil.joinPath("src", "test", "resources", "optimizerts");
+ private static final String PATH_QUERIES = FileUtil.joinPath(PATH_BASE, "queries_sqlpp");
+ private static final String PATH_EXPECTED = FileUtil.joinPath(PATH_BASE, "results_parser_sqlpp");
+ private static final String PATH_ACTUAL = FileUtil.joinPath("target", "opt_parserts", "results_parser_sqlpp");
private static final ArrayList<String> ignore = AsterixTestHelper.readFile(FILENAME_IGNORE, PATH_BASE);
private static final ArrayList<String> only = AsterixTestHelper.readFile(FILENAME_ONLY, PATH_BASE);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestUtil.java
index 891e463..9c3c393 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestUtil.java
@@ -25,6 +25,7 @@
import org.apache.asterix.test.base.AsterixTestHelper;
import org.apache.asterix.test.common.TestHelper;
+import org.apache.hyracks.util.file.FileUtil;
import org.junit.Assume;
import org.junit.internal.AssumptionViolatedException;
@@ -34,15 +35,15 @@
String extensionQuery, String extensionResult, String pathExpected, String pathActual) {
if (file.isDirectory() && !file.getName().startsWith(".")) {
for (File innerfile : file.listFiles()) {
- String subdir = innerfile.isDirectory() ? TestHelper.joinPath(path, innerfile.getName()) : path;
+ String subdir = innerfile.isDirectory() ? FileUtil.joinPath(path, innerfile.getName()) : path;
suiteBuild(innerfile, testArgs, subdir, separator, extensionQuery, extensionResult, pathExpected,
pathActual);
}
}
if (file.isFile() && file.getName().endsWith(extensionQuery)) {
String resultFileName = AsterixTestHelper.extToResExt(file.getName(), extensionResult);
- File expectedFile = new File(TestHelper.joinPath(pathExpected, path, resultFileName));
- File actualFile = new File(TestHelper.joinPath(pathActual, path, resultFileName));
+ File expectedFile = new File(FileUtil.joinPath(pathExpected, path, resultFileName));
+ File actualFile = new File(FileUtil.joinPath(pathActual, path, resultFileName));
testArgs.add(new Object[] { file, expectedFile, actualFile });
}
}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/SmokeParserTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/SmokeParserTest.java
index 8fe9370..3c856b5 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/SmokeParserTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/SmokeParserTest.java
@@ -24,7 +24,7 @@
import java.util.logging.Logger;
import org.apache.asterix.test.base.AsterixTestHelper;
-import org.apache.asterix.test.common.TestHelper;
+import org.apache.hyracks.util.file.FileUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -41,10 +41,10 @@
private static final String EXTENSION_RESULT = "ast";
private static final String FILENAME_IGNORE = "ignore.txt";
private static final String FILENAME_ONLY = "only.txt";
- private static final String PATH_BASE = TestHelper.joinPath("src", "test", "resources", "parserts");
- private static final String PATH_QUERIES = TestHelper.joinPath(PATH_BASE, "queries_sqlpp");
- private static final String PATH_EXPECTED = TestHelper.joinPath(PATH_BASE, "results_parser_sqlpp");
- private static final String PATH_ACTUAL = TestHelper.joinPath("target", "parserts");
+ private static final String PATH_BASE = FileUtil.joinPath("src", "test", "resources", "parserts");
+ private static final String PATH_QUERIES = FileUtil.joinPath(PATH_BASE, "queries_sqlpp");
+ private static final String PATH_EXPECTED = FileUtil.joinPath(PATH_BASE, "results_parser_sqlpp");
+ private static final String PATH_ACTUAL = FileUtil.joinPath("target", "parserts");
private static final ArrayList<String> ignore = AsterixTestHelper.readFile(FILENAME_IGNORE, PATH_BASE);
private static final ArrayList<String> only = AsterixTestHelper.readFile(FILENAME_ONLY, PATH_BASE);
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/api.xml b/asterixdb/asterix-app/src/test/resources/runtimets/api.xml
index 0fa83dd..372aa47 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/api.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/api.xml
@@ -66,11 +66,6 @@
</compilation-unit>
</test-case>
<test-case FilePath="api">
- <compilation-unit name="replication">
- <output-dir compare="Text">replication</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="api">
<compilation-unit name="query_status_1">
<output-dir compare="Text">query_status_1</output-dir>
<expected-error>HTTP/1.1 404 Not Found</expected-error>
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/APIQueries.xml b/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/APIQueries.xml
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/APIQueries.xml
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm
index 42fb7c3..03884bd 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1/cluster_state_1.1.adm
@@ -5,110 +5,32 @@
"threadDumpUri" : "http://127.0.0.1:19002/admin/cluster/cc/threaddump"
},
"config" : {
- "api.port" : 19002,
- "cluster.partitions" : {
- "0" : {
- "partitionId" : 0,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 0
- },
- "1" : {
- "partitionId" : 1,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 1
- },
- "2" : {
- "partitionId" : 2,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 0
- },
- "3" : {
- "partitionId" : 3,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 1
- }
- },
"compiler.framesize" : 32768,
"compiler.groupmemory" : 163840,
"compiler.joinmemory" : 262144,
"compiler.parallelism" : 0,
"compiler.pregelix.home" : "~/pregelix",
"compiler.sortmemory" : 327680,
- "core.dump.paths" : { },
"feed.central.manager.port" : 4500,
"feed.max.threshold.period" : 5,
"feed.memory.available.wait.timeout" : 10,
"feed.memory.global.budget" : 67108864,
"feed.pending.work.threshold" : 50,
- "feed.port" : 19003,
- "instance.name" : null,
- "log.level" : "WARNING",
+ "instance.name" : "DEFAULT_INSTANCE",
+ "log.level" : "INFO",
"max.wait.active.cluster" : 60,
+ "messaging.frame.count" : 512,
+ "messaging.frame.size" : 4096,
"metadata.callback.port" : 0,
+ "metadata.listen.port" : 0,
"metadata.node" : "asterix_nc1",
- "metadata.partition" : {
- "partitionId" : 0,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 0
- },
- "metadata.port" : 0,
"metadata.registration.timeout.secs" : 60,
- "node.partitions" : {
- "asterix_nc1" : [ {
- "partitionId" : 0,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 0
- }, {
- "partitionId" : 1,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 1
- } ],
- "asterix_nc2" : [ {
- "partitionId" : 2,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 0
- }, {
- "partitionId" : 3,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 1
- } ]
- },
- "node.stores" : {
- "asterix_nc1" : [ "iodevice0", "iodevice1" ],
- "asterix_nc2" : [ "iodevice0", "iodevice1" ]
- },
"plot.activate" : false,
- "storage.buffercache.maxopenfiles" : 2147483647,
- "storage.buffercache.pagesize" : 32768,
- "storage.buffercache.size" : 50331648,
- "storage.lsm.bloomfilter.falsepositiverate" : 0.01,
- "storage.memorycomponent.globalbudget" : 536870912,
- "storage.memorycomponent.numcomponents" : 2,
- "storage.memorycomponent.numpages" : 8,
- "storage.memorycomponent.pagesize" : 131072,
- "storage.metadata.memorycomponent.numpages" : 85,
- "transaction.log.dirs" : {
- "asterix_nc1" : "target/txnLogDir/asterix_nc1",
- "asterix_nc2" : "target/txnLogDir/asterix_nc2"
- },
+ "replication.log.batchsize" : 4096,
+ "replication.log.buffer.numpages" : 8,
+ "replication.log.buffer.pagesize" : 131072,
+ "replication.max.remote.recovery.attempts" : 5,
+ "replication.timeout" : 30,
"txn.commitprofiler.reportinterval" : 5,
"txn.job.recovery.memorysize" : 67108864,
"txn.lock.escalationthreshold" : 1000,
@@ -120,10 +42,7 @@
"txn.log.checkpoint.history" : 0,
"txn.log.checkpoint.lsnthreshold" : 67108864,
"txn.log.checkpoint.pollfrequency" : 120,
- "txn.log.partitionsize" : 268435456,
- "web.port" : 19001,
- "web.queryinterface.port" : 19006,
- "web.secondary.port" : 19005
+ "txn.log.partitionsize" : 268435456
},
"diagnosticsUri" : "http://127.0.0.1:19002/admin/diagnostics",
"fullShutdownUri" : "http://127.0.0.1:19002/admin/shutdown?all=true",
@@ -155,7 +74,6 @@
"statsUri" : "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/stats",
"threadDumpUri" : "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/threaddump"
} ],
- "replicationUri" : "http://127.0.0.1:19002/admin/cluster/replication",
"shutdownUri" : "http://127.0.0.1:19002/admin/shutdown",
"state" : "ACTIVE",
"versionUri" : "http://127.0.0.1:19002/admin/version"
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.adm
index 75c4d3e..372ac00 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_full/cluster_state_1_full.1.adm
@@ -5,110 +5,32 @@
"threadDumpUri" : "http://127.0.0.1:19002/admin/cluster/cc/threaddump"
},
"config" : {
- "api.port" : 19002,
- "cluster.partitions" : {
- "0" : {
- "partitionId" : 0,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 0
- },
- "1" : {
- "partitionId" : 1,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 1
- },
- "2" : {
- "partitionId" : 2,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 0
- },
- "3" : {
- "partitionId" : 3,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 1
- }
- },
"compiler.framesize" : 32768,
"compiler.groupmemory" : 163840,
"compiler.joinmemory" : 262144,
"compiler.parallelism" : -1,
"compiler.pregelix.home" : "~/pregelix",
"compiler.sortmemory" : 327680,
- "core.dump.paths" : { },
"feed.central.manager.port" : 4500,
"feed.max.threshold.period" : 5,
"feed.memory.available.wait.timeout" : 10,
"feed.memory.global.budget" : 67108864,
"feed.pending.work.threshold" : 50,
- "feed.port" : 19003,
- "instance.name" : null,
+ "instance.name" : "DEFAULT_INSTANCE",
"log.level" : "WARNING",
"max.wait.active.cluster" : 60,
+ "messaging.frame.count" : 512,
+ "messaging.frame.size" : 4096,
"metadata.callback.port" : 0,
+ "metadata.listen.port" : 0,
"metadata.node" : "asterix_nc1",
- "metadata.partition" : {
- "partitionId" : 0,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 0
- },
- "metadata.port" : 0,
"metadata.registration.timeout.secs" : 60,
- "node.partitions" : {
- "asterix_nc1" : [ {
- "partitionId" : 0,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 0
- }, {
- "partitionId" : 1,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 1
- } ],
- "asterix_nc2" : [ {
- "partitionId" : 2,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 0
- }, {
- "partitionId" : 3,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 1
- } ]
- },
- "node.stores" : {
- "asterix_nc1" : [ "iodevice0", "iodevice1" ],
- "asterix_nc2" : [ "iodevice0", "iodevice1" ]
- },
"plot.activate" : false,
- "storage.buffercache.maxopenfiles" : 2147483647,
- "storage.buffercache.pagesize" : 32768,
- "storage.buffercache.size" : 50331648,
- "storage.lsm.bloomfilter.falsepositiverate" : 0.01,
- "storage.memorycomponent.globalbudget" : 536870912,
- "storage.memorycomponent.numcomponents" : 2,
- "storage.memorycomponent.numpages" : 8,
- "storage.memorycomponent.pagesize" : 131072,
- "storage.metadata.memorycomponent.numpages" : 85,
- "transaction.log.dirs" : {
- "asterix_nc1" : "target/txnLogDir/asterix_nc1",
- "asterix_nc2" : "target/txnLogDir/asterix_nc2"
- },
+ "replication.log.batchsize" : 4096,
+ "replication.log.buffer.numpages" : 8,
+ "replication.log.buffer.pagesize" : 131072,
+ "replication.max.remote.recovery.attempts" : 5,
+ "replication.timeout" : 30,
"txn.commitprofiler.reportinterval" : 5,
"txn.job.recovery.memorysize" : 67108864,
"txn.lock.escalationthreshold" : 1000,
@@ -120,10 +42,7 @@
"txn.log.checkpoint.history" : 0,
"txn.log.checkpoint.lsnthreshold" : 67108864,
"txn.log.checkpoint.pollfrequency" : 120,
- "txn.log.partitionsize" : 268435456,
- "web.port" : 19001,
- "web.queryinterface.port" : 19006,
- "web.secondary.port" : 19005
+ "txn.log.partitionsize" : 268435456
},
"diagnosticsUri" : "http://127.0.0.1:19002/admin/diagnostics",
"fullShutdownUri" : "http://127.0.0.1:19002/admin/shutdown?all=true",
@@ -155,7 +74,6 @@
"statsUri" : "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/stats",
"threadDumpUri" : "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/threaddump"
} ],
- "replicationUri" : "http://127.0.0.1:19002/admin/cluster/replication",
"shutdownUri" : "http://127.0.0.1:19002/admin/shutdown",
"state" : "ACTIVE",
"versionUri" : "http://127.0.0.1:19002/admin/version"
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.adm
index 76219aa..9d1ba6e 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/cluster_state_1_less/cluster_state_1_less.1.adm
@@ -5,110 +5,32 @@
"threadDumpUri" : "http://127.0.0.1:19002/admin/cluster/cc/threaddump"
},
"config" : {
- "api.port" : 19002,
- "cluster.partitions" : {
- "0" : {
- "partitionId" : 0,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 0
- },
- "1" : {
- "partitionId" : 1,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 1
- },
- "2" : {
- "partitionId" : 2,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 0
- },
- "3" : {
- "partitionId" : 3,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 1
- }
- },
"compiler.framesize" : 32768,
"compiler.groupmemory" : 163840,
"compiler.joinmemory" : 262144,
"compiler.parallelism" : 3,
"compiler.pregelix.home" : "~/pregelix",
"compiler.sortmemory" : 327680,
- "core.dump.paths" : { },
"feed.central.manager.port" : 4500,
"feed.max.threshold.period" : 5,
"feed.memory.available.wait.timeout" : 10,
"feed.memory.global.budget" : 67108864,
"feed.pending.work.threshold" : 50,
- "feed.port" : 19003,
- "instance.name" : null,
+ "instance.name" : "DEFAULT_INSTANCE",
"log.level" : "WARNING",
"max.wait.active.cluster" : 60,
+ "messaging.frame.count" : 512,
+ "messaging.frame.size" : 4096,
"metadata.callback.port" : 0,
+ "metadata.listen.port" : 0,
"metadata.node" : "asterix_nc1",
- "metadata.partition" : {
- "partitionId" : 0,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 0
- },
- "metadata.port" : 0,
"metadata.registration.timeout.secs" : 60,
- "node.partitions" : {
- "asterix_nc1" : [ {
- "partitionId" : 0,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 0
- }, {
- "partitionId" : 1,
- "nodeId" : "asterix_nc1",
- "activeNodeId" : "asterix_nc1",
- "active" : true,
- "iodeviceNum" : 1
- } ],
- "asterix_nc2" : [ {
- "partitionId" : 2,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 0
- }, {
- "partitionId" : 3,
- "nodeId" : "asterix_nc2",
- "activeNodeId" : "asterix_nc2",
- "active" : true,
- "iodeviceNum" : 1
- } ]
- },
- "node.stores" : {
- "asterix_nc1" : [ "iodevice0", "iodevice1" ],
- "asterix_nc2" : [ "iodevice0", "iodevice1" ]
- },
"plot.activate" : false,
- "storage.buffercache.maxopenfiles" : 2147483647,
- "storage.buffercache.pagesize" : 32768,
- "storage.buffercache.size" : 50331648,
- "storage.lsm.bloomfilter.falsepositiverate" : 0.01,
- "storage.memorycomponent.globalbudget" : 536870912,
- "storage.memorycomponent.numcomponents" : 2,
- "storage.memorycomponent.numpages" : 8,
- "storage.memorycomponent.pagesize" : 131072,
- "storage.metadata.memorycomponent.numpages" : 85,
- "transaction.log.dirs" : {
- "asterix_nc1" : "target/txnLogDir/asterix_nc1",
- "asterix_nc2" : "target/txnLogDir/asterix_nc2"
- },
+ "replication.log.batchsize" : 4096,
+ "replication.log.buffer.numpages" : 8,
+ "replication.log.buffer.pagesize" : 131072,
+ "replication.max.remote.recovery.attempts" : 5,
+ "replication.timeout" : 30,
"txn.commitprofiler.reportinterval" : 5,
"txn.job.recovery.memorysize" : 67108864,
"txn.lock.escalationthreshold" : 1000,
@@ -120,10 +42,7 @@
"txn.log.checkpoint.history" : 0,
"txn.log.checkpoint.lsnthreshold" : 67108864,
"txn.log.checkpoint.pollfrequency" : 120,
- "txn.log.partitionsize" : 268435456,
- "web.port" : 19001,
- "web.queryinterface.port" : 19006,
- "web.secondary.port" : 19005
+ "txn.log.partitionsize" : 268435456
},
"diagnosticsUri" : "http://127.0.0.1:19002/admin/diagnostics",
"fullShutdownUri" : "http://127.0.0.1:19002/admin/shutdown?all=true",
@@ -155,7 +74,6 @@
"statsUri" : "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/stats",
"threadDumpUri" : "http://127.0.0.1:19002/admin/cluster/node/asterix_nc2/threaddump"
} ],
- "replicationUri" : "http://127.0.0.1:19002/admin/cluster/replication",
"shutdownUri" : "http://127.0.0.1:19002/admin/shutdown",
"state" : "ACTIVE",
"versionUri" : "http://127.0.0.1:19002/admin/version"
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/replication/replication.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/api/replication/replication.1.adm
deleted file mode 100644
index a614faa..0000000
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/api/replication/replication.1.adm
+++ /dev/null
@@ -1,9 +0,0 @@
-{
- "config" : {
- "log.batchsize" : 4096,
- "log.buffer.numpages" : 8,
- "log.buffer.pagesize" : 131072,
- "max.remote.recovery.attempts" : 5,
- "timeout" : 30
- }
-}
diff --git a/asterixdb/asterix-client-helper/pom.xml b/asterixdb/asterix-client-helper/pom.xml
index 726e8fa..bf8a51e 100644
--- a/asterixdb/asterix-client-helper/pom.xml
+++ b/asterixdb/asterix-client-helper/pom.xml
@@ -118,7 +118,7 @@
<artifactId>maven-dependency-plugin</artifactId>
<version>2.10</version>
<configuration>
- <usedDependencies>
+ <usedDependencies combine.children="append">
<usedDependency>org.codehaus.mojo.appassembler:appassembler-booter</usedDependency>
</usedDependencies>
</configuration>
diff --git a/asterixdb/asterix-common/pom.xml b/asterixdb/asterix-common/pom.xml
index 65e82a2..60f2af0 100644
--- a/asterixdb/asterix-common/pom.xml
+++ b/asterixdb/asterix-common/pom.xml
@@ -273,6 +273,10 @@
<artifactId>hyracks-http</artifactId>
</dependency>
<dependency>
+ <groupId>org.apache.hyracks</groupId>
+ <artifactId>hyracks-control-common</artifactId>
+ </dependency>
+ <dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
</dependency>
@@ -288,5 +292,9 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
</dependency>
+ <dependency>
+ <groupId>args4j</groupId>
+ <artifactId>args4j</artifactId>
+ </dependency>
</dependencies>
</project>
\ No newline at end of file
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IClusterStateManager.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IClusterStateManager.java
index d971f48..bf03d54 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IClusterStateManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IClusterStateManager.java
@@ -21,6 +21,7 @@
import java.util.Map;
import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
+import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public interface IClusterStateManager {
@@ -65,7 +66,7 @@
/**
* @return a map of nodeId and NC Configuration for active nodes.
*/
- Map<String, Map<String, String>> getActiveNcConfiguration();
+ Map<String, Map<IOption, Object>> getActiveNcConfiguration();
/**
* @return The current metadata node Id.
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AbstractProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AbstractProperties.java
index eccbff2..a5bb945 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AbstractProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AbstractProperties.java
@@ -18,63 +18,11 @@
*/
package org.apache.asterix.common.config;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.function.UnaryOperator;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
public abstract class AbstractProperties {
- private static final Logger LOGGER = Logger.getLogger(AbstractProperties.class.getName());
- private static final List<AbstractProperties> IMPLS = Collections.synchronizedList(new ArrayList<>());
protected final PropertiesAccessor accessor;
public AbstractProperties(PropertiesAccessor accessor) {
this.accessor = accessor;
- IMPLS.add(this);
- }
-
- public Map<String, Object> getProperties() {
- return getProperties(UnaryOperator.identity());
- }
-
- public Map<String, Object> getProperties(UnaryOperator<String> keyTransformer) {
- Map<String, Object> properties = new HashMap<>();
- for (Method m : getClass().getMethods()) {
- PropertyKey key = m.getAnnotation(PropertyKey.class);
- Stringify stringify = m.getAnnotation(Stringify.class);
- if (key != null) {
- try {
- if (stringify != null) {
- properties.put(keyTransformer.apply(key.value()), String.valueOf(m.invoke(this)));
- } else {
- properties.put(keyTransformer.apply(key.value()), m.invoke(this));
- }
- } catch (Exception e) {
- LOGGER.log(Level.INFO, "Error accessing property: " + key.value(), e);
- }
- }
- }
- return properties;
- }
-
- @Retention(RetentionPolicy.RUNTIME)
- public @interface PropertyKey {
- String value();
- }
-
- @Retention(RetentionPolicy.RUNTIME)
- public @interface Stringify {
- }
-
- public static List<AbstractProperties> getImplementations() {
- return Collections.unmodifiableList(IMPLS);
}
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java
index 3ae2bd9..5f3a1aa 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java
@@ -18,37 +18,40 @@
*/
package org.apache.asterix.common.config;
-import java.io.File;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.hyracks.api.config.IConfigManager;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
public class AsterixProperties {
- //---------------------------- Directories ---------------------------//
- private static final String VAR = File.separator + "var";
- private static final String LIB = VAR + File.separator + "lib";
- private static final String ASTERIXDB = LIB + File.separator + "asterixdb";
- //----------------------------- Sections -----------------------------//
- public static final String SECTION_ASTERIX = "asterix";
+
public static final String SECTION_PREFIX_EXTENSION = "extension/";
- public static final String SECTION_CC = "cc";
- public static final String SECTION_PREFIX_NC = "nc/";
- //---------------------------- Properties ---=------------------------//
- public static final String PROPERTY_CLUSTER_ADDRESS = "cluster.address";
- public static final String PROPERTY_INSTANCE_NAME = "instance";
- public static final String DEFAULT_INSTANCE_NAME = "DEFAULT_INSTANCE";
- public static final String PROPERTY_METADATA_NODE = "metadata.node";
- public static final String PROPERTY_COREDUMP_DIR = "coredumpdir";
- public static final String DEFAULT_COREDUMP_DIR = String.join(File.separator, ASTERIXDB, "coredump");
- public static final String PROPERTY_TXN_LOG_DIR = "txnlogdir";
- public static final String DEFAULT_TXN_LOG_DIR = String.join(File.separator, ASTERIXDB, "txn-log");
- public static final String PROPERTY_IO_DEV = "iodevices";
- public static final String DEFAULT_IO_DEV = String.join(File.separator, ASTERIXDB, "iodevice");
- public static final String PROPERTY_STORAGE_DIR = "storagedir";
- public static final String DEFAULT_STORAGE_DIR = "storage";
- public static final String PROPERTY_CLASS = "class";
private AsterixProperties() {
}
- public static final String getSectionId(String prefix, String section) {
+ public static String getSectionId(String prefix, String section) {
return section.substring(prefix.length());
}
+
+ public static void registerConfigOptions(IConfigManager configManager) {
+ configManager.register(
+ NodeProperties.Option.class,
+ CompilerProperties.Option.class,
+ MetadataProperties.Option.class,
+ ExternalProperties.Option.class,
+ FeedProperties.Option.class,
+ MessagingProperties.Option.class,
+ ReplicationProperties.Option.class,
+ StorageProperties.Option.class,
+ TransactionProperties.Option.class);
+
+ // we need to process the old-style asterix config before we apply defaults!
+ configManager.addConfigurator(IConfigManager.APPLY_DEFAULTS_METRIC - 1, () -> {
+ try {
+ PropertiesAccessor.getInstance(configManager.getAppConfig());
+ } catch (AsterixException e) {
+ throw new HyracksDataException(e);
+ }
+ });
+ }
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/BuildProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/BuildProperties.java
index a1d4703..7a36dbe 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/BuildProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/BuildProperties.java
@@ -27,89 +27,6 @@
super(accessor);
}
- public String getUserEmail() {
- return accessor.getProperty("git.build.user.email", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getBuildHost() {
- return accessor.getProperty("git.build.host", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getDirty() {
- return accessor.getProperty("git.dirty", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getRemoteOriginUrl() {
- return accessor.getProperty("git.remote.origin.url", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getClosestTagName() {
- return accessor.getProperty("git.closest.tag.name", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getCommitIdDescribeShort() {
- return accessor.getProperty("git.commit.id.describe-short", "",
- PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getCommitUserEmail() {
- return accessor.getProperty("git.commit.user.email", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getCommitTime() {
- return accessor.getProperty("git.commit.time", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getCommitMessage() {
- return accessor.getProperty("git.commit.message.full", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getBuildVersion() {
- return accessor.getProperty("git.build.version", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getCommitMessageShort() {
- return accessor.getProperty("git.commit.message.short", "",
- PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getShortCommitId() {
- return accessor.getProperty("git.commit.id.abbrev", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getBranch() {
- return accessor.getProperty("git.branch", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getBuildUserName() {
- return accessor.getProperty("git.build.user.name", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getClosestTagCommitCount() {
- return accessor.getProperty("git.closest.tag.commit.count", "",
- PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getCommitIdDescribe() {
- return accessor.getProperty("git.commit.id.describe", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getCommitId() {
- return accessor.getProperty("git.commit.id", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getTags() {
- return accessor.getProperty("git.tags", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getBuildTime() {
- return accessor.getProperty("git.build.time", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
- public String getCommitUserName() {
- return accessor.getProperty("git.commit.user.name", "", PropertyInterpreters.getStringPropertyInterpreter());
- }
-
public Map<String, String> getAllProps() {
return accessor.getBuildProperties();
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
index 3710e54b0..f97c5a5 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
@@ -18,68 +18,91 @@
*/
package org.apache.asterix.common.config;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER_BYTE_UNIT;
+import static org.apache.hyracks.control.common.config.OptionTypes.LONG_BYTE_UNIT;
+import static org.apache.hyracks.control.common.config.OptionTypes.STRING;
import static org.apache.hyracks.util.StorageUtil.StorageUnit.KILOBYTE;
import static org.apache.hyracks.util.StorageUtil.StorageUnit.MEGABYTE;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.util.StorageUtil;
public class CompilerProperties extends AbstractProperties {
- public static final String COMPILER_SORTMEMORY_KEY = "compiler.sortmemory";
- private static final long COMPILER_SORTMEMORY_DEFAULT = StorageUtil.getSizeInBytes(32, MEGABYTE);
+ public enum Option implements IOption {
+ COMPILER_SORTMEMORY(LONG_BYTE_UNIT, StorageUtil.getLongSizeInBytes(32L, MEGABYTE)),
+ COMPILER_JOINMEMORY(LONG_BYTE_UNIT, StorageUtil.getLongSizeInBytes(32L, MEGABYTE)),
+ COMPILER_GROUPMEMORY(LONG_BYTE_UNIT, StorageUtil.getLongSizeInBytes(32L, MEGABYTE)),
+ COMPILER_FRAMESIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(32, KILOBYTE)),
+ COMPILER_PARALLELISM(INTEGER, COMPILER_PARALLELISM_AS_STORAGE),
+ COMPILER_PREGELIX_HOME(STRING, "~/pregelix");
- public static final String COMPILER_GROUPMEMORY_KEY = "compiler.groupmemory";
- private static final long COMPILER_GROUPMEMORY_DEFAULT = StorageUtil.getSizeInBytes(32, MEGABYTE);
+ private final IOptionType type;
+ private final Object defaultValue;
- public static final String COMPILER_JOINMEMORY_KEY = "compiler.joinmemory";
- private static final long COMPILER_JOINMEMORY_DEFAULT = StorageUtil.getSizeInBytes(32, MEGABYTE);
+ Option(IOptionType type, Object defaultValue) {
+ this.type = type;
+ this.defaultValue = defaultValue;
+ }
- private static final String COMPILER_FRAMESIZE_KEY = "compiler.framesize";
- private static final int COMPILER_FRAMESIZE_DEFAULT = StorageUtil.getSizeInBytes(32, KILOBYTE);
+ @Override
+ public Section section() {
+ return Section.COMMON;
+ }
- public static final String COMPILER_PARALLELISM_KEY = "compiler.parallelism";
+ @Override
+ public String description() {
+ return "";
+ }
+
+ @Override
+ public IOptionType type() {
+ return type;
+ }
+
+ @Override
+ public Object defaultValue() {
+ return defaultValue;
+ }
+ }
+ public static final String COMPILER_SORTMEMORY_KEY = Option.COMPILER_SORTMEMORY.ini();
+
+ public static final String COMPILER_GROUPMEMORY_KEY = Option.COMPILER_GROUPMEMORY.ini();
+
+ public static final String COMPILER_JOINMEMORY_KEY = Option.COMPILER_JOINMEMORY.ini();
+
+ public static final String COMPILER_PARALLELISM_KEY = Option.COMPILER_PARALLELISM.ini();
+
public static final int COMPILER_PARALLELISM_AS_STORAGE = 0;
- private static final String COMPILER_PREGELIX_HOME = "compiler.pregelix.home";
- private static final String COMPILER_PREGELIX_HOME_DEFAULT = "~/pregelix";
-
public CompilerProperties(PropertiesAccessor accessor) {
super(accessor);
}
- @PropertyKey(COMPILER_SORTMEMORY_KEY)
public long getSortMemorySize() {
- return accessor.getProperty(COMPILER_SORTMEMORY_KEY, COMPILER_SORTMEMORY_DEFAULT,
- PropertyInterpreters.getLongBytePropertyInterpreter());
+ return accessor.getLong(Option.COMPILER_SORTMEMORY);
}
- @PropertyKey(COMPILER_JOINMEMORY_KEY)
public long getJoinMemorySize() {
- return accessor.getProperty(COMPILER_JOINMEMORY_KEY, COMPILER_JOINMEMORY_DEFAULT,
- PropertyInterpreters.getLongBytePropertyInterpreter());
+ return accessor.getLong(Option.COMPILER_JOINMEMORY);
}
- @PropertyKey(COMPILER_GROUPMEMORY_KEY)
public long getGroupMemorySize() {
- return accessor.getProperty(COMPILER_GROUPMEMORY_KEY, COMPILER_GROUPMEMORY_DEFAULT,
- PropertyInterpreters.getLongBytePropertyInterpreter());
+ return accessor.getLong(Option.COMPILER_GROUPMEMORY);
}
- @PropertyKey(COMPILER_FRAMESIZE_KEY)
public int getFrameSize() {
- return accessor.getProperty(COMPILER_FRAMESIZE_KEY, COMPILER_FRAMESIZE_DEFAULT,
- PropertyInterpreters.getIntegerBytePropertyInterpreter());
+ return accessor.getInt(Option.COMPILER_FRAMESIZE);
}
- @PropertyKey(COMPILER_PARALLELISM_KEY)
public int getParallelism() {
- return accessor.getProperty(COMPILER_PARALLELISM_KEY, COMPILER_PARALLELISM_AS_STORAGE,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.COMPILER_PARALLELISM);
}
- @PropertyKey(COMPILER_PREGELIX_HOME)
public String getPregelixHome() {
- return accessor.getProperty(COMPILER_PREGELIX_HOME, COMPILER_PREGELIX_HOME_DEFAULT,
- PropertyInterpreters.getStringPropertyInterpreter());
+ return accessor.getString(Option.COMPILER_PREGELIX_HOME);
}
}
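Each property family now declares its options as an enum implementing IOption, and the accessor reads typed values through those constants; section(), type(), and defaultValue() replace the old key/default constant pairs and PropertyKey annotations. A minimal sketch of a new property class in the same style, where the option, its default, and the class name are illustrative only and the enum would still need to be registered (via AsterixProperties.registerConfigOptions or configManager.register) to take effect:

import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER;

import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.config.IOptionType;
import org.apache.hyracks.api.config.Section;

public class ExampleProperties extends AbstractProperties {

    public enum Option implements IOption {
        // Hypothetical option, shown only to illustrate the pattern.
        EXAMPLE_RETRY_COUNT(INTEGER, 3);

        private final IOptionType type;
        private final Object defaultValue;

        Option(IOptionType type, Object defaultValue) {
            this.type = type;
            this.defaultValue = defaultValue;
        }

        @Override
        public Section section() {
            return Section.COMMON;
        }

        @Override
        public String description() {
            return "Number of times to retry (illustrative only)";
        }

        @Override
        public IOptionType type() {
            return type;
        }

        @Override
        public Object defaultValue() {
            return defaultValue;
        }
    }

    public ExampleProperties(PropertiesAccessor accessor) {
        super(accessor);
    }

    public int getRetryCount() {
        return accessor.getInt(Option.EXAMPLE_RETRY_COUNT);
    }
}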
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
index eb6bda5..2f85221 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
@@ -18,101 +18,107 @@
*/
package org.apache.asterix.common.config;
-import java.util.logging.Level;
+import static org.apache.hyracks.control.common.config.OptionTypes.*;
+
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
public class ExternalProperties extends AbstractProperties {
- public static final String EXTERNAL_WEBPORT_KEY = "web.port";
- public static final int EXTERNAL_WEBPORT_DEFAULT = 19001;
+ public enum Option implements IOption {
+ WEB_PORT(INTEGER, 19001),
+ WEB_QUERYINTERFACE_PORT(INTEGER, 19006),
+ API_PORT(INTEGER, 19002),
+ FEED_PORT(INTEGER, 19003),
+ LOG_LEVEL(LEVEL, java.util.logging.Level.WARNING),
+ MAX_WAIT_ACTIVE_CLUSTER(INTEGER, 60),
+ PLOT_ACTIVATE(BOOLEAN, false),
+ CC_JAVA_OPTS(STRING, "-Xmx1024m"),
+ NC_JAVA_OPTS(STRING, "-Xmx1024m");
- public static final String EXTERNAL_SECONDARY_WEBPORT_KEY = "web.secondary.port";
- public static final int EXTERNAL_SECONDARY_WEBPORT_DEFAULT = 19005;
+ private final IOptionType type;
+ private final Object defaultValue;
- public static final String QUERY_WEBPORT_KEY = "web.queryinterface.port";
- public static final int QUERY_WEBPORT_DEFAULT = 19006;
+ Option(IOptionType type, Object defaultValue) {
+ this.type = type;
+ this.defaultValue = defaultValue;
+ }
- public static final String EXTERNAL_LOGLEVEL_KEY = "log.level";
- public static final Level EXTERNAL_LOGLEVEL_DEFAULT = Level.WARNING;
+ @Override
+ public Section section() {
+ switch (this) {
+ case WEB_PORT:
+ case WEB_QUERYINTERFACE_PORT:
+ case API_PORT:
+ case FEED_PORT:
+ return Section.CC;
+ case LOG_LEVEL:
+ case MAX_WAIT_ACTIVE_CLUSTER:
+ case PLOT_ACTIVATE:
+ return Section.COMMON;
+ case CC_JAVA_OPTS:
+ case NC_JAVA_OPTS:
+ return Section.VIRTUAL;
+ default:
+ throw new IllegalStateException("NYI: " + this);
+ }
+ }
- public static final String EXTERNAL_APISERVER_KEY = "api.port";
- public static final int EXTERNAL_APISERVER_DEFAULT = 19002;
+ @Override
+ public String description() {
+ // TODO(mblow): add descriptions
+ return null;
+ }
- public static final String EXTERNAL_FEEDSERVER_KEY = "feed.port";
- public static final int EXTERNAL_FEEDSERVER_DEFAULT = 19003;
+ @Override
+ public IOptionType type() {
+ return type;
+ }
- public static final String EXTERNAL_CC_JAVA_OPTS_KEY = "cc.java.opts";
- public static final String EXTERNAL_CC_JAVA_OPTS_DEFAULT = "-Xmx1024m";
-
- public static final String EXTERNAL_NC_JAVA_OPTS_KEY = "nc.java.opts";
- public static final String EXTERNAL_NC_JAVA_OPTS_DEFAULT = "-Xmx1024m";
-
- public static final String EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER = "max.wait.active.cluster";
- public static final int EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER_DEFAULT = 60;
-
- public static final String EXTERNAL_PLOT_ACTIVATE = "plot.activate";
- public static final boolean EXTERNAL_PLOT_ACTIVATE_DEFAULT = false;
+ @Override
+ public Object defaultValue() {
+ return defaultValue;
+ }
+ }
public ExternalProperties(PropertiesAccessor accessor) {
super(accessor);
}
- @PropertyKey(EXTERNAL_WEBPORT_KEY)
public int getWebInterfacePort() {
- return accessor.getProperty(EXTERNAL_WEBPORT_KEY, EXTERNAL_WEBPORT_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.WEB_PORT);
}
- @PropertyKey(EXTERNAL_SECONDARY_WEBPORT_KEY)
- public int getSecondaryWebInterfacePort() {
- return accessor.getProperty(EXTERNAL_SECONDARY_WEBPORT_KEY, EXTERNAL_SECONDARY_WEBPORT_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
- }
-
- @PropertyKey(QUERY_WEBPORT_KEY)
public int getQueryWebInterfacePort() {
- return accessor.getProperty(QUERY_WEBPORT_KEY, QUERY_WEBPORT_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.WEB_QUERYINTERFACE_PORT);
}
- @PropertyKey(EXTERNAL_APISERVER_KEY)
public int getAPIServerPort() {
- return accessor.getProperty(EXTERNAL_APISERVER_KEY, EXTERNAL_APISERVER_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.API_PORT);
}
- @PropertyKey(EXTERNAL_FEEDSERVER_KEY)
public int getFeedServerPort() {
- return accessor.getProperty(EXTERNAL_FEEDSERVER_KEY, EXTERNAL_FEEDSERVER_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.FEED_PORT);
}
- @PropertyKey(EXTERNAL_LOGLEVEL_KEY)
- @Stringify
- public Level getLogLevel() {
- return accessor.getProperty(EXTERNAL_LOGLEVEL_KEY, EXTERNAL_LOGLEVEL_DEFAULT,
- PropertyInterpreters.getLevelPropertyInterpreter());
+ public java.util.logging.Level getLogLevel() {
+ return accessor.getLoggingLevel(Option.LOG_LEVEL);
+ }
+
+ public int getMaxWaitClusterActive() {
+ return accessor.getInt(Option.MAX_WAIT_ACTIVE_CLUSTER);
+ }
+
+ public boolean getIsPlottingEnabled() {
+ return accessor.getBoolean(Option.PLOT_ACTIVATE);
}
public String getNCJavaParams() {
- return accessor.getProperty(EXTERNAL_NC_JAVA_OPTS_KEY, EXTERNAL_NC_JAVA_OPTS_DEFAULT,
- PropertyInterpreters.getStringPropertyInterpreter());
+ return accessor.getString(Option.NC_JAVA_OPTS);
}
public String getCCJavaParams() {
- return accessor.getProperty(EXTERNAL_CC_JAVA_OPTS_KEY, EXTERNAL_CC_JAVA_OPTS_DEFAULT,
- PropertyInterpreters.getStringPropertyInterpreter());
+ return accessor.getString(Option.CC_JAVA_OPTS);
}
-
- @PropertyKey(EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER)
- public int getMaxWaitClusterActive() {
- return accessor.getProperty(EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER, EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
- }
-
- @PropertyKey(EXTERNAL_PLOT_ACTIVATE)
- public Boolean getIsPlottingEnabled() {
- return accessor.getProperty(EXTERNAL_PLOT_ACTIVATE, EXTERNAL_PLOT_ACTIVATE_DEFAULT,
- PropertyInterpreters.getBooleanPropertyInterpreter());
- }
-
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/FeedProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/FeedProperties.java
index 0afbbd3..a96d746 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/FeedProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/FeedProperties.java
@@ -18,65 +18,84 @@
*/
package org.apache.asterix.common.config;
-import org.apache.hyracks.util.StorageUtil;
-
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER;
+import static org.apache.hyracks.control.common.config.OptionTypes.LONG;
+import static org.apache.hyracks.control.common.config.OptionTypes.LONG_BYTE_UNIT;
import static org.apache.hyracks.util.StorageUtil.StorageUnit.MEGABYTE;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.util.StorageUtil;
+
public class FeedProperties extends AbstractProperties {
- private static final String FEED_CENTRAL_MANAGER_PORT_KEY = "feed.central.manager.port";
- private static final int FEED_CENTRAL_MANAGER_PORT_DEFAULT = 4500;
+ public enum Option implements IOption {
+ FEED_PENDING_WORK_THRESHOLD(INTEGER, 50),
+ FEED_MEMORY_GLOBAL_BUDGET(LONG_BYTE_UNIT, StorageUtil.getLongSizeInBytes(64L, MEGABYTE)),
+ FEED_MEMORY_AVAILABLE_WAIT_TIMEOUT(LONG, 10L),
+ FEED_CENTRAL_MANAGER_PORT(INTEGER, 4500),
+ FEED_MAX_THRESHOLD_PERIOD(INTEGER, 5);
- private static final String FEED_MEMORY_GLOBALBUDGET_KEY = "feed.memory.global.budget";
- private static final long FEED_MEMORY_GLOBALBUDGET_DEFAULT = StorageUtil.getSizeInBytes(64, MEGABYTE);
- // i.e. 2048 frames (assuming 32768 as frame size)
+ private final IOptionType type;
+ private final Object defaultValue;
- private static final String FEED_MEMORY_AVAILABLE_WAIT_TIMEOUT_KEY = "feed.memory.available.wait.timeout";
- private static final long FEED_MEMORY_AVAILABLE_WAIT_TIMEOUT_DEFAULT = 10; // 10 seconds
+ Option(IOptionType type, Object defaultValue) {
+ this.type = type;
+ this.defaultValue = defaultValue;
+ }
- private static final String FEED_PENDING_WORK_THRESHOLD_KEY = "feed.pending.work.threshold";
- private static final int FEED_PENDING_WORK_THRESHOLD_DEFAULT = 50;
+ @Override
+ public Section section() {
+ return Section.COMMON;
+ }
- private static final String FEED_MAX_SUCCESSIVE_THRESHOLD_PERIOD_KEY = "feed.max.threshold.period";
- private static final int FEED_MAX_SUCCESSIVE_THRESHOLD_PERIOD_DEFAULT = 5;
+ @Override
+ public String description() {
+ // TODO(mblow): add missing descriptions
+ switch (this) {
+ case FEED_CENTRAL_MANAGER_PORT:
+ return "port at which the Central Feed Manager listens for control messages from local Feed " +
+ "Managers";
+ case FEED_MAX_THRESHOLD_PERIOD:
+ return "maximum length of input queue before triggering corrective action";
+ default:
+ return null;
+ }
+ }
+
+ @Override
+ public IOptionType type() {
+ return type;
+ }
+
+ @Override
+ public Object defaultValue() {
+ return defaultValue;
+ }
+ }
public FeedProperties(PropertiesAccessor accessor) {
super(accessor);
}
- @PropertyKey(FEED_MEMORY_GLOBALBUDGET_KEY)
- public long getMemoryComponentGlobalBudget() {
- return accessor.getProperty(FEED_MEMORY_GLOBALBUDGET_KEY, FEED_MEMORY_GLOBALBUDGET_DEFAULT,
- PropertyInterpreters.getLongBytePropertyInterpreter());
- }
-
- @PropertyKey(FEED_MEMORY_AVAILABLE_WAIT_TIMEOUT_KEY)
- public long getMemoryAvailableWaitTimeout() {
- return accessor.getProperty(FEED_MEMORY_AVAILABLE_WAIT_TIMEOUT_KEY, FEED_MEMORY_AVAILABLE_WAIT_TIMEOUT_DEFAULT,
- PropertyInterpreters.getLongPropertyInterpreter());
- }
-
- /**
- * @return port at which the Central Feed Manager listens for control messages from local Feed Managers
- */
- @PropertyKey(FEED_CENTRAL_MANAGER_PORT_KEY)
- public int getFeedCentralManagerPort() {
- return accessor.getProperty(FEED_CENTRAL_MANAGER_PORT_KEY, FEED_CENTRAL_MANAGER_PORT_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
- }
-
- /**
- * @return maximum length of input queue before triggering corrective action
- */
- @PropertyKey(FEED_PENDING_WORK_THRESHOLD_KEY)
public int getPendingWorkThreshold() {
- return accessor.getProperty(FEED_PENDING_WORK_THRESHOLD_KEY, FEED_PENDING_WORK_THRESHOLD_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.FEED_PENDING_WORK_THRESHOLD);
}
- @PropertyKey(FEED_MAX_SUCCESSIVE_THRESHOLD_PERIOD_KEY)
+ public long getMemoryComponentGlobalBudget() {
+ return accessor.getLong(Option.FEED_MEMORY_GLOBAL_BUDGET);
+ }
+
+ public long getMemoryAvailableWaitTimeout() {
+ return accessor.getLong(Option.FEED_MEMORY_AVAILABLE_WAIT_TIMEOUT);
+ }
+
+ public int getFeedCentralManagerPort() {
+ return accessor.getInt(Option.FEED_CENTRAL_MANAGER_PORT);
+ }
+
public int getMaxSuccessiveThresholdPeriod() {
- return accessor.getProperty(FEED_MAX_SUCCESSIVE_THRESHOLD_PERIOD_KEY,
- FEED_MAX_SUCCESSIVE_THRESHOLD_PERIOD_DEFAULT, PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.FEED_MAX_THRESHOLD_PERIOD);
}
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertiesProvider.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertiesProvider.java
index 1de1523..4637437 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertiesProvider.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertiesProvider.java
@@ -19,21 +19,23 @@
package org.apache.asterix.common.config;
public interface IPropertiesProvider {
- public StorageProperties getStorageProperties();
+ StorageProperties getStorageProperties();
- public TransactionProperties getTransactionProperties();
+ TransactionProperties getTransactionProperties();
- public CompilerProperties getCompilerProperties();
+ CompilerProperties getCompilerProperties();
- public MetadataProperties getMetadataProperties();
+ MetadataProperties getMetadataProperties();
- public ExternalProperties getExternalProperties();
+ ExternalProperties getExternalProperties();
- public FeedProperties getFeedProperties();
+ FeedProperties getFeedProperties();
BuildProperties getBuildProperties();
- public ReplicationProperties getReplicationProperties();
+ ReplicationProperties getReplicationProperties();
- public MessagingProperties getMessagingProperties();
+ MessagingProperties getMessagingProperties();
+
+ NodeProperties getNodeProperties();
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MessagingProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MessagingProperties.java
index 2e98e1e..8097b5f 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MessagingProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MessagingProperties.java
@@ -18,28 +18,60 @@
*/
package org.apache.asterix.common.config;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER_BYTE_UNIT;
+import static org.apache.hyracks.util.StorageUtil.StorageUnit.KILOBYTE;
+
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.util.StorageUtil;
-import org.apache.hyracks.util.StorageUtil.StorageUnit;
public class MessagingProperties extends AbstractProperties {
- private static final String MESSAGING_FRAME_SIZE_KEY = "messaging.frame.size";
- private static final int MESSAGING_FRAME_SIZE_DEFAULT = StorageUtil.getSizeInBytes(4, StorageUnit.KILOBYTE);
+ public enum Option implements IOption {
+ MESSAGING_FRAME_SIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(4, KILOBYTE)),
+ MESSAGING_FRAME_COUNT(INTEGER, 512);
- private static final String MESSAGING_FRAME_COUNT_KEY = "messaging.frame.count";
- private static final int MESSAGING_BUFFER_COUNTE_DEFAULT = 512;
+ private final IOptionType type;
+ private final Object defaultValue;
+
+ Option(IOptionType type, Object defaultValue) {
+ this.type = type;
+ this.defaultValue = defaultValue;
+ }
+
+ @Override
+ public Section section() {
+ return Section.COMMON;
+ }
+
+ @Override
+ public String description() {
+ // TODO(mblow): add missing descriptions
+ return null;
+ }
+
+ @Override
+ public IOptionType type() {
+ return type;
+ }
+
+ @Override
+ public Object defaultValue() {
+ return defaultValue;
+ }
+ }
public MessagingProperties(PropertiesAccessor accessor) {
super(accessor);
}
public int getFrameSize() {
- return accessor.getProperty(MESSAGING_FRAME_SIZE_KEY, MESSAGING_FRAME_SIZE_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.MESSAGING_FRAME_SIZE);
}
public int getFrameCount() {
- return accessor.getProperty(MESSAGING_FRAME_COUNT_KEY, MESSAGING_BUFFER_COUNTE_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.MESSAGING_FRAME_COUNT);
}
}
\ No newline at end of file
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataProperties.java
index ab65b71..948bdad 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MetadataProperties.java
@@ -18,90 +18,135 @@
*/
package org.apache.asterix.common.config;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER;
+import static org.apache.hyracks.control.common.config.OptionTypes.STRING;
+
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import org.apache.asterix.common.cluster.ClusterPartition;
+import org.apache.hyracks.api.config.IApplicationConfig;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
public class MetadataProperties extends AbstractProperties {
- private static final String METADATA_REGISTRATION_TIMEOUT_KEY = "metadata.registration.timeout.secs";
- private static final long METADATA_REGISTRATION_TIMEOUT_DEFAULT = 60;
+ public enum Option implements IOption {
+ INSTANCE_NAME(STRING, "DEFAULT_INSTANCE"),
+ METADATA_NODE(STRING, null),
+ METADATA_REGISTRATION_TIMEOUT_SECS(INTEGER, 60),
+ METADATA_LISTEN_PORT(INTEGER, 0),
+ METADATA_CALLBACK_PORT(INTEGER, 0);
- private static final String METADATA_PORT_KEY = "metadata.port";
- private static final int METADATA_PORT_DEFAULT = 0;
+ private final IOptionType type;
+ private final Object defaultValue;
- private static final String METADATA_CALLBACK_PORT_KEY = "metadata.callback.port";
- private static final int METADATA_CALLBACK_PORT_DEFAULT = 0;
+ <T> Option(IOptionType<T> type, T defaultValue) {
+ this.type = type;
+ this.defaultValue = defaultValue;
+ }
+
+ @Override
+ public Section section() {
+ return Section.COMMON;
+ }
+
+ @Override
+ public String description() {
+ switch (this) {
+ case INSTANCE_NAME:
+ return "The name of this cluster instance";
+ case METADATA_NODE:
+ return "the node which should serve as the metadata node";
+ case METADATA_REGISTRATION_TIMEOUT_SECS:
+ return "how long in seconds to wait for the metadata node to register with the CC";
+ case METADATA_LISTEN_PORT:
+ return "IP port to bind metadata listener (0 = random port)";
+ case METADATA_CALLBACK_PORT:
+ return "IP port to bind metadata callback listener (0 = random port)";
+ default:
+ throw new IllegalStateException("NYI: " + this);
+ }
+ }
+
+ @Override
+ public IOptionType type() {
+ return type;
+ }
+
+ @Override
+ public Object defaultValue() {
+ return defaultValue;
+ }
+
+ @Override
+ public Object get(IApplicationConfig cfg) {
+ if (this == METADATA_NODE) {
+ Object value = cfg.getStatic(this);
+ return value != null ? value : cfg.getNCNames().isEmpty() ? null : cfg.getNCNames().get(0);
+ } else {
+ return cfg.getStatic(this);
+ }
+ }
+
+ }
public MetadataProperties(PropertiesAccessor accessor) {
super(accessor);
}
- @PropertyKey("instance.name")
public String getInstanceName() {
- return accessor.getInstanceName();
+ return accessor.getString(Option.INSTANCE_NAME);
}
- @PropertyKey("metadata.node")
public String getMetadataNodeName() {
- return accessor.getMetadataNodeName();
+ return accessor.getString(Option.METADATA_NODE);
}
- @PropertyKey("metadata.partition")
public ClusterPartition getMetadataPartition() {
- return accessor.getMetadataPartition();
+ // metadata partition is always the first partition on the metadata node
+ return accessor.getNodePartitions().get(getMetadataNodeName())[0];
}
- @PropertyKey("node.stores")
public Map<String, String[]> getStores() {
return accessor.getStores();
}
public List<String> getNodeNames() {
- return accessor.getNodeNames();
+ return accessor.getNCNames();
}
public String getCoredumpPath(String nodeId) {
return accessor.getCoredumpPath(nodeId);
}
- @PropertyKey("core.dump.paths")
public Map<String, String> getCoredumpPaths() {
return accessor.getCoredumpConfig();
}
- @PropertyKey("node.partitions")
public Map<String, ClusterPartition[]> getNodePartitions() {
return accessor.getNodePartitions();
}
- @PropertyKey("cluster.partitions")
public SortedMap<Integer, ClusterPartition> getClusterPartitions() {
return accessor.getClusterPartitions();
}
- @PropertyKey("transaction.log.dirs")
public Map<String, String> getTransactionLogDirs() {
return accessor.getTransactionLogDirs();
}
- @PropertyKey(METADATA_REGISTRATION_TIMEOUT_KEY)
- public long getRegistrationTimeoutSecs() {
- return accessor.getProperty(METADATA_REGISTRATION_TIMEOUT_KEY, METADATA_REGISTRATION_TIMEOUT_DEFAULT,
- PropertyInterpreters.getLongPropertyInterpreter());
+ public int getRegistrationTimeoutSecs() {
+ return accessor.getInt(Option.METADATA_REGISTRATION_TIMEOUT_SECS);
}
- @PropertyKey(METADATA_PORT_KEY)
public int getMetadataPort() {
- return accessor.getProperty(METADATA_PORT_KEY, METADATA_PORT_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.METADATA_LISTEN_PORT);
}
- @PropertyKey(METADATA_CALLBACK_PORT_KEY)
public int getMetadataCallbackPort() {
- return accessor.getProperty(METADATA_CALLBACK_PORT_KEY, METADATA_CALLBACK_PORT_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.METADATA_CALLBACK_PORT);
}
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/NodeProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/NodeProperties.java
new file mode 100644
index 0000000..4175873
--- /dev/null
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/NodeProperties.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.common.config;
+
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.control.common.config.OptionTypes;
+import org.apache.hyracks.control.common.controllers.NCConfig;
+import org.apache.hyracks.util.file.FileUtil;
+
+public class NodeProperties extends AbstractProperties {
+
+ public enum Option implements IOption {
+ INITIAL_RUN(OptionTypes.BOOLEAN, false),
+ CORE_DUMP_DIR(OptionTypes.STRING, FileUtil.joinPath(NCConfig.defaultDir, "coredump")),
+ TXN_LOG_DIR(OptionTypes.STRING, FileUtil.joinPath(NCConfig.defaultDir, "txn-log")),
+ STORAGE_SUBDIR(OptionTypes.STRING, "storage"),
+ ;
+
+ private final IOptionType type;
+ private final Object defaultValue;
+
+ <T> Option(IOptionType<T> type, T defaultValue) {
+ this.type = type;
+ this.defaultValue = defaultValue;
+ }
+
+ @Override
+ public Section section() {
+ return Section.NC;
+ }
+
+ @Override
+ public String description() {
+ switch (this) {
+ case INITIAL_RUN:
+ return "A flag indicating if it's the first time the NC is started";
+ default:
+ return null;
+ }
+ }
+
+ @Override
+ public IOptionType type() {
+ return type;
+ }
+
+ @Override
+ public Object defaultValue() {
+ return defaultValue;
+ }
+
+ @Override
+ public boolean hidden() {
+ return this == INITIAL_RUN;
+ }
+ }
+
+ public NodeProperties(PropertiesAccessor accessor) {
+ super(accessor);
+ }
+
+ public boolean isInitialRun() {
+ return accessor.getBoolean(Option.INITIAL_RUN);
+ }
+
+ public boolean isVirtualNc() {
+ return accessor.getBoolean(NCConfig.Option.VIRTUAL_NC);
+ }
+}
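
NodeProperties introduces node-scoped options. A short sketch of resolving one of them for a specific node via the NC's effective configuration, following the getNCEffectiveConfig() usage in PropertiesAccessor.configureNc() below; the helper class and nodeId parameter are illustrative only:

package org.apache.asterix.common.config;

import org.apache.hyracks.api.config.IApplicationConfig;

public final class NodeConfigExample {
    private NodeConfigExample() {
    }

    // Returns the txn log dir for one NC: the node's own setting if present,
    // otherwise the TXN_LOG_DIR default defined in NodeProperties.Option.
    public static String txnLogDirFor(IApplicationConfig cfg, String nodeId) {
        IApplicationConfig nodeCfg = cfg.getNCEffectiveConfig(nodeId);
        return nodeCfg.getString(NodeProperties.Option.TXN_LOG_DIR);
    }
}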
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java
index 1f503ec..233daa7 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertiesAccessor.java
@@ -18,20 +18,29 @@
*/
package org.apache.asterix.common.config;
+import static org.apache.asterix.common.config.MetadataProperties.Option.INSTANCE_NAME;
+import static org.apache.asterix.common.config.MetadataProperties.Option.METADATA_NODE;
+import static org.apache.asterix.common.config.NodeProperties.Option.STORAGE_SUBDIR;
+import static org.apache.hyracks.control.common.controllers.NCConfig.Option.IODEVICES;
+
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
-import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Predicate;
+import java.util.stream.Stream;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
@@ -48,22 +57,23 @@
import org.apache.asterix.common.utils.ConfigUtil;
import org.apache.commons.lang3.mutable.MutableInt;
import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.api.application.IApplicationConfig;
+import org.apache.hyracks.api.config.IApplicationConfig;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.control.common.application.ConfigManagerApplicationConfig;
+import org.apache.hyracks.control.common.config.ConfigManager;
+import org.apache.hyracks.control.common.controllers.ControllerConfig;
+import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
-public class PropertiesAccessor {
+public class PropertiesAccessor implements IApplicationConfig {
private static final Logger LOGGER = Logger.getLogger(PropertiesAccessor.class.getName());
- private static final AtomicReference<PropertiesAccessor> instanceHolder = new AtomicReference<>();
- private final String instanceName;
- private final String metadataNodeName;
- private final List<String> nodeNames = new ArrayList<>();;
- private final Map<String, String[]> stores = new HashMap<>();;
+ private static final Map<IApplicationConfig, PropertiesAccessor> instances = new ConcurrentHashMap<>();
+ private final Map<String, String[]> stores = new HashMap<>();
private final Map<String, String> coredumpConfig = new HashMap<>();
-
- // This can be removed when asterix-configuration.xml is no longer required.
- private final Map<String, Property> asterixConfigurationParams;
private final IApplicationConfig cfg;
private final Map<String, String> transactionLogDirs = new HashMap<>();
private final Map<String, String> asterixBuildProperties = new HashMap<>();
@@ -73,61 +83,105 @@
private final List<AsterixExtension> extensions;
/**
- * Constructor which reads asterix-configuration.xml, the old way.
- *
- * @throws AsterixException
- * @throws IOException
+ * Constructor which wraps an IApplicationConfig.
*/
- private PropertiesAccessor() throws AsterixException, IOException {
- String fileName = System.getProperty(GlobalConfig.CONFIG_FILE_PROPERTY);
- if (fileName == null) {
- fileName = GlobalConfig.DEFAULT_CONFIG_FILE_NAME;
- }
- AsterixConfiguration asterixConfiguration = configure(fileName);
- cfg = null;
- instanceName = asterixConfiguration.getInstanceName();
- metadataNodeName = asterixConfiguration.getMetadataNode();
- List<Store> configuredStores = asterixConfiguration.getStore();
+ private PropertiesAccessor(IApplicationConfig cfg) throws AsterixException, IOException {
+ this.cfg = cfg;
nodePartitionsMap = new HashMap<>();
- int uniquePartitionId = 0;
- // Here we iterate through all <store> elements in asterix-configuration.xml.
- // For each one, we create an array of ClusterPartitions and store this array
- // in nodePartitionsMap, keyed by the node name. The array is the same length
- // as the comma-separated <storeDirs> child element, because Managix will have
- // arranged for that element to be populated with the full paths to each
- // partition directory (as formed by appending the <store> subdirectory to
- // each <iodevices> path from the user's original cluster.xml).
- for (Store store : configuredStores) {
- String trimmedStoreDirs = store.getStoreDirs().trim();
- String[] nodeStores = trimmedStoreDirs.split(",");
- ClusterPartition[] nodePartitions = new ClusterPartition[nodeStores.length];
- for (int i = 0; i < nodePartitions.length; i++) {
- ClusterPartition partition = new ClusterPartition(uniquePartitionId++, store.getNcId(), i);
- clusterPartitions.put(partition.getPartitionId(), partition);
- nodePartitions[i] = partition;
- }
- stores.put(store.getNcId(), nodeStores);
- nodePartitionsMap.put(store.getNcId(), nodePartitions);
- nodeNames.add(store.getNcId());
- }
-
- // Get extensions
extensions = new ArrayList<>();
- if (asterixConfiguration.getExtensions() != null) {
- for (Extension ext : asterixConfiguration.getExtensions().getExtension()) {
- extensions.add(ConfigUtil.toAsterixExtension(ext));
- }
+ // Determine whether to use old-style asterix-configuration.xml or new-style configuration.
+ // QQQ strip this out eventually
+ // QQQ this is NOT a good way to determine whether to use config file
+ ConfigManager configManager = ((ConfigManagerApplicationConfig)cfg).getConfigManager();
+ boolean usingConfigFile = Stream
+ .of((IOption) ControllerConfig.Option.CONFIG_FILE, ControllerConfig.Option.CONFIG_FILE_URL)
+ .map(configManager::get).anyMatch(Objects::nonNull);
+ AsterixConfiguration asterixConfiguration = null;
+ try {
+ asterixConfiguration = configure(System.getProperty(GlobalConfig.CONFIG_FILE_PROPERTY,
+ GlobalConfig.DEFAULT_CONFIG_FILE_NAME));
+ } catch (Exception e) {
+ // cannot load config file, assume new-style config
}
- asterixConfigurationParams = new HashMap<>();
- for (Property p : asterixConfiguration.getProperty()) {
- asterixConfigurationParams.put(p.getName(), p);
- }
- for (Coredump cd : asterixConfiguration.getCoredump()) {
- coredumpConfig.put(cd.getNcId(), cd.getCoredumpPath());
- }
- for (TransactionLogDir txnLogDir : asterixConfiguration.getTransactionLogDir()) {
- transactionLogDirs.put(txnLogDir.getNcId(), txnLogDir.getTxnLogDirPath());
+ if (!usingConfigFile && asterixConfiguration != null) {
+ LOGGER.info("using old-style configuration: " + System.getProperty(GlobalConfig.CONFIG_FILE_PROPERTY));
+ if (asterixConfiguration.getInstanceName() != null) {
+ configManager.set(INSTANCE_NAME, asterixConfiguration.getInstanceName());
+ }
+ if (asterixConfiguration.getMetadataNode() != null) {
+ configManager.set(METADATA_NODE, asterixConfiguration.getMetadataNode());
+ }
+ List<Store> configuredStores = asterixConfiguration.getStore();
+
+ int uniquePartitionId = 0;
+ // Here we iterate through all <store> elements in asterix-configuration.xml.
+ // For each one, we create an array of ClusterPartitions and store this array
+ // in nodePartitionsMap, keyed by the node name. The array is the same length
+ // as the comma-separated <storeDirs> child element, because Managix will have
+ // arranged for that element to be populated with the full paths to each
+ // partition directory (as formed by appending the <store> subdirectory to
+ // each <iodevices> path from the user's original cluster.xml).
+ for (Store store : configuredStores) {
+ String trimmedStoreDirs = store.getStoreDirs().trim();
+ String[] nodeStores = trimmedStoreDirs.split(",");
+ ClusterPartition[] nodePartitions = new ClusterPartition[nodeStores.length];
+ for (int i = 0; i < nodePartitions.length; i++) {
+ ClusterPartition partition = new ClusterPartition(uniquePartitionId++, store.getNcId(), i);
+ clusterPartitions.put(partition.getPartitionId(), partition);
+ nodePartitions[i] = partition;
+ }
+ stores.put(store.getNcId(), nodeStores);
+ nodePartitionsMap.put(store.getNcId(), nodePartitions);
+ configManager.registerVirtualNode(store.getNcId());
+ // marking node as virtual, as we're not using NCServices with old-style config
+ configManager.set(store.getNcId(), NCConfig.Option.VIRTUAL_NC, true);
+ }
+ // Get extensions
+ if (asterixConfiguration.getExtensions() != null) {
+ for (Extension ext : asterixConfiguration.getExtensions().getExtension()) {
+ extensions.add(ConfigUtil.toAsterixExtension(ext));
+ }
+ }
+ for (Property p : asterixConfiguration.getProperty()) {
+ IOption option = null;
+ for (Section section : Arrays.asList(Section.COMMON, Section.CC, Section.NC)) {
+ IOption optionTemp = cfg.lookupOption(section.sectionName(), p.getName());
+ if (optionTemp == null) {
+ continue;
+ }
+ if (option != null) {
+ throw new IllegalStateException("ERROR: option found in multiple sections: " +
+ Arrays.asList(option, optionTemp));
+ }
+ option = optionTemp;
+ }
+ if (option == null) {
+ LOGGER.warn("Ignoring unknown property: " + p.getName());
+ } else {
+ configManager.set(option, option.type().parse(p.getValue()));
+ }
+ }
+ for (Coredump cd : asterixConfiguration.getCoredump()) {
+ coredumpConfig.put(cd.getNcId(), cd.getCoredumpPath());
+ }
+ for (TransactionLogDir txnLogDir : asterixConfiguration.getTransactionLogDir()) {
+ transactionLogDirs.put(txnLogDir.getNcId(), txnLogDir.getTxnLogDirPath());
+ }
+ } else {
+ LOGGER.info("using new-style configuration");
+ MutableInt uniquePartitionId = new MutableInt(0);
+ // Iterate through each configured NC.
+ for (String ncName : cfg.getNCNames()) {
+ configureNc(ncName, uniquePartitionId);
+ }
+ for (String section : cfg.getSectionNames()) {
+ if (section.startsWith(AsterixProperties.SECTION_PREFIX_EXTENSION)) {
+ String className = AsterixProperties.getSectionId(
+ AsterixProperties.SECTION_PREFIX_EXTENSION, section);
+ configureExtension(className, section);
+ }
+ }
}
loadAsterixBuildProperties();
}
@@ -163,31 +217,6 @@
}
}
- /**
- * Constructor which wraps an IApplicationConfig.
- */
- private PropertiesAccessor(IApplicationConfig cfg) throws AsterixException {
- this.cfg = cfg;
- instanceName = cfg.getString(AsterixProperties.SECTION_ASTERIX, AsterixProperties.PROPERTY_INSTANCE_NAME,
- AsterixProperties.DEFAULT_INSTANCE_NAME);
- nodePartitionsMap = new HashMap<>();
- MutableInt uniquePartitionId = new MutableInt(0);
- extensions = new ArrayList<>();
- // Iterate through each configured NC.
- for (String section : cfg.getSections()) {
- if (section.startsWith(AsterixProperties.SECTION_PREFIX_NC)) {
- configureNc(section, uniquePartitionId);
- } else if (section.startsWith(AsterixProperties.SECTION_PREFIX_EXTENSION)) {
- String className = AsterixProperties.getSectionId(AsterixProperties.SECTION_PREFIX_EXTENSION, section);
- configureExtension(className, section);
- }
- }
- metadataNodeName = getProperty(AsterixProperties.PROPERTY_METADATA_NODE,
- nodeNames.isEmpty() ? "" : nodeNames.get(0), PropertyInterpreters.getStringPropertyInterpreter());
- asterixConfigurationParams = null;
- loadAsterixBuildProperties();
- }
-
private void configureExtension(String className, String section) {
Set<String> keys = cfg.getKeys(section);
List<Pair<String, String>> kvs = new ArrayList<>();
@@ -198,27 +227,24 @@
extensions.add(new AsterixExtension(className, kvs));
}
- private void configureNc(String section, MutableInt uniquePartitionId) {
- String ncId = AsterixProperties.getSectionId(AsterixProperties.SECTION_PREFIX_NC, section);
+ private void configureNc(String ncId, MutableInt uniquePartitionId) {
// Now we assign the coredump and txnlog directories for this node.
// QQQ Default values? Should they be specified here? Or should there
// be a default.ini? Certainly wherever they are, they should be platform-dependent.
- coredumpConfig.put(ncId, cfg.getString(section, AsterixProperties.PROPERTY_COREDUMP_DIR,
- AsterixProperties.DEFAULT_COREDUMP_DIR));
+ IApplicationConfig nodeCfg = cfg.getNCEffectiveConfig(ncId);
+ coredumpConfig.put(ncId, nodeCfg.getString(NodeProperties.Option.CORE_DUMP_DIR));
transactionLogDirs.put(ncId,
- cfg.getString(section, AsterixProperties.PROPERTY_TXN_LOG_DIR, AsterixProperties.DEFAULT_TXN_LOG_DIR));
+ nodeCfg.getString(NodeProperties.Option.TXN_LOG_DIR));
// Now we create an array of ClusterPartitions for all the partitions
// on this NC.
- String[] iodevices = cfg.getString(section, AsterixProperties.PROPERTY_IO_DEV,
- AsterixProperties.DEFAULT_IO_DEV).split(",");
- String storageSubdir = cfg.getString(section, AsterixProperties.PROPERTY_STORAGE_DIR,
- AsterixProperties.DEFAULT_STORAGE_DIR);
+ String[] iodevices = nodeCfg.getStringArray(IODEVICES);
+ String storageSubdir = nodeCfg.getString(STORAGE_SUBDIR);
String[] nodeStores = new String[iodevices.length];
ClusterPartition[] nodePartitions = new ClusterPartition[iodevices.length];
for (int i = 0; i < nodePartitions.length; i++) {
- // Construct final storage path from iodevice dir + storage subdir.s
+ // Construct final storage path from iodevice dir + storage subdir
nodeStores[i] = iodevices[i] + File.separator + storageSubdir;
// Create ClusterPartition instances for this NC.
ClusterPartition partition = new ClusterPartition(uniquePartitionId.getValue(), ncId, i);
@@ -228,7 +254,6 @@
}
stores.put(ncId, nodeStores);
nodePartitionsMap.put(ncId, nodePartitions);
- nodeNames.add(ncId);
}
private void loadAsterixBuildProperties() throws AsterixException {
@@ -248,18 +273,10 @@
}
}
- public String getMetadataNodeName() {
- return metadataNodeName;
- }
-
public Map<String, String[]> getStores() {
return stores;
}
- public List<String> getNodeNames() {
- return nodeNames;
- }
-
public String getCoredumpPath(String nodeId) {
return coredumpConfig.get(nodeId);
}
@@ -276,50 +293,19 @@
return asterixBuildProperties;
}
- public <T> T getProperty(String property, T defaultValue, IPropertyInterpreter<T> interpreter) {
- String value;
- Property p = null;
- if (asterixConfigurationParams != null) {
- p = asterixConfigurationParams.get(property);
- value = (p == null) ? null : p.getValue();
- } else {
- value = cfg.getString("app", property);
- if (value == null) {
- value = cfg.getString("asterix", property);
- if (value != null) {
- LOGGER.warn("[asterix] config section deprecated and will be removed in a future release;" +
- " please update to [app] (found: " + property + ')');
- }
- }
- }
- if (value == null) {
- return defaultValue;
- }
+ public <T> T getProperty(String property, T defaultValue, IOptionType<T> interpreter) {
+ String value = cfg.getString("common", property);
try {
- return interpreter.interpret(value);
+ return value == null ? defaultValue : interpreter.parse(value);
} catch (IllegalArgumentException e) {
if (LOGGER.isEnabledFor(Level.ERROR)) {
- StringBuilder msg = new StringBuilder(
- "Invalid property value '" + value + "' for property '" + property + "'.\n");
- if (p != null) {
- msg.append("See the description: \n" + p.getDescription() + "\n");
- }
- msg.append("Default = " + defaultValue);
- LOGGER.error(msg.toString());
+ LOGGER.error("Invalid property value '" + value + "' for property '" + property + "'.\n" +
+ "Default = " + defaultValue);
}
throw e;
}
}
- public String getInstanceName() {
- return instanceName;
- }
-
- public ClusterPartition getMetadataPartition() {
- // metadata partition is always the first partition on the metadata node
- return nodePartitionsMap.get(metadataNodeName)[0];
- }
-
public Map<String, ClusterPartition[]> getNodePartitions() {
return nodePartitionsMap;
}
@@ -333,22 +319,78 @@
}
public static PropertiesAccessor getInstance(IApplicationConfig cfg) throws IOException, AsterixException {
- // Determine whether to use old-style asterix-configuration.xml or new-style configuration.
- // QQQ strip this out eventually
- // QQQ this is NOT a good way to determine whether the config is valid
- PropertiesAccessor propertiesAccessor;
- if (cfg != null && cfg.getString("cc", "cluster.address") != null) {
- propertiesAccessor = new PropertiesAccessor(cfg);
- } else {
- propertiesAccessor = new PropertiesAccessor();
+ PropertiesAccessor accessor = instances.get(cfg);
+ if (accessor == null) {
+ accessor = new PropertiesAccessor(cfg);
+ if (instances.putIfAbsent(cfg, accessor) != null) {
+ return instances.get(cfg);
+ }
}
- if (!instanceHolder.compareAndSet(null, propertiesAccessor)) {
- propertiesAccessor = instanceHolder.get();
- }
- return propertiesAccessor;
+ return accessor;
}
- public static PropertiesAccessor getInstance() throws IOException, AsterixException {
- return getInstance(null);
+ @Override
+ public Object getStatic(IOption option) {
+ return cfg.getStatic(option);
+ }
+
+ @Override
+ public String getString(String section, String key) {
+ return cfg.getString(section, key);
+ }
+
+ @Override
+ public int getInt(String section, String key) {
+ return cfg.getInt(section, key);
+ }
+
+ @Override
+ public long getLong(String section, String key) {
+ return cfg.getLong(section, key);
+ }
+
+ @Override
+ public Set<String> getSectionNames() {
+ return cfg.getSectionNames();
+ }
+
+ @Override
+ public Set<String> getKeys(String section) {
+ return cfg.getKeys(section);
+ }
+
+ @Override
+ public List<String> getNCNames() {
+ return cfg.getNCNames();
+ }
+
+ @Override
+ public IOption lookupOption(String sectionName, String propertyName) {
+ return cfg.lookupOption(sectionName, propertyName);
+ }
+
+ @Override
+ public IApplicationConfig getNCEffectiveConfig(String nodeId) {
+ return cfg.getNCEffectiveConfig(nodeId);
+ }
+
+ @Override
+ public Set<IOption> getOptions() {
+ return cfg.getOptions();
+ }
+
+ @Override
+ public Set<IOption> getOptions(Section section) {
+ return cfg.getOptions(section);
+ }
+
+ @Override
+ public Set<Section> getSections() {
+ return cfg.getSections();
+ }
+
+ @Override
+ public Set<Section> getSections(Predicate<Section> predicate) {
+ return cfg.getSections(predicate);
}
}
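
With PropertiesAccessor now implementing IApplicationConfig and cached per configuration, callers obtain it from the application config and build the typed property groups on top of it. A usage sketch; the helper class is hypothetical, and appConfig would normally be supplied by the CC/NC service at startup:

package org.apache.asterix.common.config;

import org.apache.hyracks.api.config.IApplicationConfig;

public final class PropertiesBootstrapExample {
    private PropertiesBootstrapExample() {
    }

    // Wrap the application config and read one typed compiler property.
    public static long sortMemoryBytes(IApplicationConfig appConfig) throws Exception {
        PropertiesAccessor accessor = PropertiesAccessor.getInstance(appConfig);
        CompilerProperties compilerProperties = new CompilerProperties(accessor);
        return compilerProperties.getSortMemorySize();
    }
}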
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertyInterpreters.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertyInterpreters.java
deleted file mode 100644
index 6703fcd..0000000
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/PropertyInterpreters.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.config;
-
-import java.util.logging.Level;
-
-import org.apache.hyracks.util.StorageUtil;
-
-public class PropertyInterpreters {
-
- public static IPropertyInterpreter<Integer> getIntegerPropertyInterpreter() {
- return Integer::parseInt;
- }
-
- public static IPropertyInterpreter<Boolean> getBooleanPropertyInterpreter() {
- return Boolean::parseBoolean;
- }
-
- public static IPropertyInterpreter<Long> getLongPropertyInterpreter() {
- return Long::parseLong;
- }
-
- public static IPropertyInterpreter<Level> getLevelPropertyInterpreter() {
- return Level::parse;
- }
-
- public static IPropertyInterpreter<String> getStringPropertyInterpreter() {
- return s -> s;
- }
-
- public static IPropertyInterpreter<Double> getDoublePropertyInterpreter() {
- return Double::parseDouble;
- }
-
- public static IPropertyInterpreter<Long> getLongBytePropertyInterpreter() {
- return StorageUtil::getByteValue;
- }
-
- public static IPropertyInterpreter<Integer> getIntegerBytePropertyInterpreter() {
- return s -> {
- long result = StorageUtil.getByteValue(s);
- if (result > Integer.MAX_VALUE || result < Integer.MIN_VALUE) {
- throw new IllegalArgumentException(
- "The given value: " + result + " is not within the int range.");
- } else {
- return (int) result;
- }
- };
- }
-
-}
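
The deleted interpreters are subsumed by the shared IOptionType instances in org.apache.hyracks.control.common.config.OptionTypes; parsing now happens behind each option's type(), as in PropertiesAccessor above. A small sketch, assuming the byte-unit types accept the same size strings (e.g. "64MB") that StorageUtil.getByteValue handled:

package org.apache.asterix.common.config;

import org.apache.hyracks.control.common.config.OptionTypes;

public final class OptionTypeParseExample {
    public static void main(String[] args) {
        // counterpart of the old getIntegerPropertyInterpreter()
        Integer frameCount = (Integer) OptionTypes.INTEGER.parse("512");
        // counterpart of the old getLongBytePropertyInterpreter(); "64MB" form assumed
        Long budget = (Long) OptionTypes.LONG_BYTE_UNIT.parse("64MB");
        System.out.println(frameCount + " frames, " + budget + " byte budget");
    }
}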
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ReplicationProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ReplicationProperties.java
index 116609e..efa5296 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ReplicationProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ReplicationProperties.java
@@ -18,6 +18,9 @@
*/
package org.apache.asterix.common.config;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER_BYTE_UNIT;
+
import java.util.Set;
import java.util.stream.Collectors;
@@ -25,48 +28,108 @@
import org.apache.asterix.common.replication.Replica;
import org.apache.asterix.event.schema.cluster.Cluster;
import org.apache.asterix.event.schema.cluster.Node;
+import org.apache.hyracks.api.config.IApplicationConfig;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.util.StorageUtil;
import org.apache.hyracks.util.StorageUtil.StorageUnit;
public class ReplicationProperties extends AbstractProperties {
+ public enum Option implements IOption {
+ REPLICATION_MAX_REMOTE_RECOVERY_ATTEMPTS(INTEGER, 5),
+ REPLICATION_LOG_BUFFER_PAGESIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(128,
+ StorageUnit.KILOBYTE)),
+ REPLICATION_LOG_BUFFER_NUMPAGES(INTEGER, 8),
+ REPLICATION_LOG_BATCHSIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(4, StorageUnit.KILOBYTE)),
+ REPLICATION_TIMEOUT(INTEGER, REPLICATION_TIME_OUT_DEFAULT),
+ ;
+
+ private final IOptionType type;
+ private final Object defaultValue;
+
+ Option(IOptionType type, Object defaultValue) {
+ this.type = type;
+ this.defaultValue = defaultValue;
+ }
+
+ @Override
+ public Section section() {
+ return Section.COMMON;
+ }
+
+ @Override
+ public String description() {
+ // TODO(mblow): add missing descriptions
+ return null;
+ }
+
+ @Override
+ public IOptionType type() {
+ return type;
+ }
+
+ @Override
+ public Object defaultValue() {
+ return defaultValue;
+ }
+
+ @Override
+ public Object get(IApplicationConfig config) {
+ switch (this) {
+ case REPLICATION_TIMEOUT:
+ final Cluster cluster = ClusterProperties.INSTANCE.getCluster();
+ if (cluster != null
+ && cluster.getHighAvailability() != null
+ && cluster.getHighAvailability().getDataReplication() != null
+ && cluster.getHighAvailability().getDataReplication().getReplicationTimeOut() != null) {
+ return cluster.getHighAvailability().getDataReplication().getReplicationTimeOut().intValue();
+ }
+ return REPLICATION_TIME_OUT_DEFAULT;
+ default:
+ return config.getStatic(this);
+ }
+ }
+ }
+
private static final int REPLICATION_DATAPORT_DEFAULT = 2000;
- private static final String REPLICATION_TIMEOUT_KEY = "replication.timeout";
private static final int REPLICATION_TIME_OUT_DEFAULT = 15;
- private static final String REPLICATION_MAX_REMOTE_RECOVERY_ATTEMPTS_KEY =
- "replication.max.remote.recovery.attempts";
- private static final int MAX_REMOTE_RECOVERY_ATTEMPTS = 5;
-
private static final String NODE_IP_ADDRESS_DEFAULT = "127.0.0.1";
- private static final String REPLICATION_LOG_BATCH_SIZE_KEY = "replication.log.batchsize";
- private static final int REPLICATION_LOG_BATCH_SIZE_DEFAULT = StorageUtil.getSizeInBytes(4, StorageUnit.KILOBYTE);
-
- private static final String REPLICATION_LOG_BUFFER_NUM_PAGES_KEY = "replication.log.buffer.numpages";
- private static final int REPLICATION_LOG_BUFFER_NUM_PAGES_DEFAULT = 8;
-
- private static final String REPLICATION_LOG_BUFFER_PAGE_SIZE_KEY = "replication.log.buffer.pagesize";
- private static final int REPLICATION_LOG_BUFFER_PAGE_SIZE_DEFAULT = StorageUtil.getSizeInBytes(128,
- StorageUnit.KILOBYTE);
-
- private final Cluster cluster;
private final IReplicationStrategy repStrategy;
public ReplicationProperties(PropertiesAccessor accessor) throws HyracksDataException {
super(accessor);
- this.cluster = ClusterProperties.INSTANCE.getCluster();
this.repStrategy = ClusterProperties.INSTANCE.getReplicationStrategy();
}
+ public int getMaxRemoteRecoveryAttempts() {
+ return accessor.getInt(Option.REPLICATION_MAX_REMOTE_RECOVERY_ATTEMPTS);
+ }
+
+ public int getLogBufferPageSize() {
+ return accessor.getInt(Option.REPLICATION_LOG_BUFFER_PAGESIZE);
+ }
+
+ public int getLogBufferNumOfPages() {
+ return accessor.getInt(Option.REPLICATION_LOG_BUFFER_NUMPAGES);
+ }
+
+ public int getLogBatchSize() {
+ return accessor.getInt(Option.REPLICATION_LOG_BATCHSIZE);
+ }
+
public String getReplicaIPAddress(String nodeId) {
Node node = ClusterProperties.INSTANCE.getNodeById(nodeId);
return node != null ? node.getClusterIp() : NODE_IP_ADDRESS_DEFAULT;
}
public int getDataReplicationPort(String nodeId) {
+ final Cluster cluster = ClusterProperties.INSTANCE.getCluster();
Node node = ClusterProperties.INSTANCE.getNodeById(nodeId);
if (node != null) {
return node.getReplicationPort() != null ? node.getReplicationPort().intValue()
@@ -98,35 +161,8 @@
return remoteReplicasIds;
}
- @PropertyKey(REPLICATION_TIMEOUT_KEY)
public int getReplicationTimeOut() {
- if (cluster != null) {
- return cluster.getHighAvailability().getDataReplication().getReplicationTimeOut().intValue();
- }
- return REPLICATION_TIME_OUT_DEFAULT;
- }
-
- @PropertyKey(REPLICATION_MAX_REMOTE_RECOVERY_ATTEMPTS_KEY)
- public int getMaxRemoteRecoveryAttempts() {
- return MAX_REMOTE_RECOVERY_ATTEMPTS;
- }
-
- @PropertyKey(REPLICATION_LOG_BUFFER_PAGE_SIZE_KEY)
- public int getLogBufferPageSize() {
- return accessor.getProperty(REPLICATION_LOG_BUFFER_PAGE_SIZE_KEY, REPLICATION_LOG_BUFFER_PAGE_SIZE_DEFAULT,
- PropertyInterpreters.getIntegerBytePropertyInterpreter());
- }
-
- @PropertyKey(REPLICATION_LOG_BUFFER_NUM_PAGES_KEY)
- public int getLogBufferNumOfPages() {
- return accessor.getProperty(REPLICATION_LOG_BUFFER_NUM_PAGES_KEY, REPLICATION_LOG_BUFFER_NUM_PAGES_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
- }
-
- @PropertyKey(REPLICATION_LOG_BATCH_SIZE_KEY)
- public int getLogBatchSize() {
- return accessor.getProperty(REPLICATION_LOG_BATCH_SIZE_KEY, REPLICATION_LOG_BATCH_SIZE_DEFAULT,
- PropertyInterpreters.getIntegerBytePropertyInterpreter());
+ return accessor.getInt(Option.REPLICATION_TIMEOUT);
}
public boolean isParticipant(String nodeId) {
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/StorageProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/StorageProperties.java
index 68ad80c..d363f3d 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/StorageProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/StorageProperties.java
@@ -18,117 +18,159 @@
*/
package org.apache.asterix.common.config;
+import static org.apache.hyracks.control.common.config.OptionTypes.DOUBLE;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER_BYTE_UNIT;
+import static org.apache.hyracks.control.common.config.OptionTypes.LONG_BYTE_UNIT;
import static org.apache.hyracks.util.StorageUtil.StorageUnit.KILOBYTE;
+import java.util.function.Function;
+
+import org.apache.hyracks.api.config.IApplicationConfig;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
import org.apache.hyracks.util.StorageUtil;
public class StorageProperties extends AbstractProperties {
- private static final String STORAGE_BUFFERCACHE_PAGESIZE_KEY = "storage.buffercache.pagesize";
- private static final int STORAGE_BUFFERCACHE_PAGESIZE_DEFAULT = StorageUtil.getSizeInBytes(128, KILOBYTE);
+ public enum Option implements IOption {
+ STORAGE_BUFFERCACHE_PAGESIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(128, KILOBYTE)),
+ // By default, uses 1/4 of the maximum heap size for read cache, i.e., disk buffer cache.
+ STORAGE_BUFFERCACHE_SIZE(LONG_BYTE_UNIT, Runtime.getRuntime().maxMemory() / 4),
+ STORAGE_BUFFERCACHE_MAXOPENFILES(INTEGER, Integer.MAX_VALUE),
+ STORAGE_MEMORYCOMPONENT_GLOBALBUDGET(LONG_BYTE_UNIT, Runtime.getRuntime().maxMemory() / 4),
+ STORAGE_MEMORYCOMPONENT_PAGESIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(128, KILOBYTE)),
+ STORAGE_MEMORYCOMPONENT_NUMPAGES(INTEGER, (Function<IApplicationConfig, Integer>) accessor ->
+ // By default, uses 1/16 of the STORAGE_MEMORYCOMPONENT_GLOBALBUDGET for the write buffer
+ // budget for a dataset, including data and indexes.
+ (int) (accessor.getLong(STORAGE_MEMORYCOMPONENT_GLOBALBUDGET) /
+ (16 * accessor.getInt(STORAGE_MEMORYCOMPONENT_PAGESIZE)))),
+ STORAGE_MEMORYCOMPONENT_NUMCOMPONENTS(INTEGER, 2),
+ STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES(INTEGER, (Function<IApplicationConfig, Integer>) accessor ->
+ // By default, uses the min of 1/64 of the STORAGE_MEMORYCOMPONENT_GLOBALBUDGET and 256 pages
+ // for the write buffer budget for a metadata dataset, including data and indexes.
+ Math.min((int) (accessor.getLong(STORAGE_MEMORYCOMPONENT_GLOBALBUDGET)
+ / (64 * accessor.getInt(STORAGE_MEMORYCOMPONENT_PAGESIZE))), 256)),
+ STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE(DOUBLE, 0.01d);
- private static final String STORAGE_BUFFERCACHE_SIZE_KEY = "storage.buffercache.size";
+ private final IOptionType interpreter;
+ private final Object defaultValue;
- private static final String STORAGE_BUFFERCACHE_MAXOPENFILES_KEY = "storage.buffercache.maxopenfiles";
- private static final int STORAGE_BUFFERCACHE_MAXOPENFILES_DEFAULT = Integer.MAX_VALUE;
+ <T> Option(IOptionType<T> interpreter, T defaultValue) {
+ this.interpreter = interpreter;
+ this.defaultValue = defaultValue;
+ }
- private static final String STORAGE_MEMORYCOMPONENT_PAGESIZE_KEY = "storage.memorycomponent.pagesize";
- private static final int STORAGE_MEMORYCOMPONENT_PAGESIZE_DEFAULT = StorageUtil.getSizeInBytes(128, KILOBYTE);
+ <T> Option(IOptionType<T> interpreter, Function<IApplicationConfig, T> defaultValueFunction) {
+ this.interpreter = interpreter;
+ this.defaultValue = defaultValueFunction;
+ }
- private static final String STORAGE_MEMORYCOMPONENT_NUMPAGES_KEY = "storage.memorycomponent.numpages";
+ @Override
+ public Section section() {
+ return Section.NC;
+ }
- private static final String STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES_KEY =
- "storage.metadata.memorycomponent.numpages";
+ @Override
+ public String description() {
+ switch (this) {
+ case STORAGE_BUFFERCACHE_PAGESIZE:
+ return "The page size in bytes for pages in the buffer cache";
+ case STORAGE_BUFFERCACHE_SIZE:
+ return "The size of memory allocated to the disk buffer cache. The value should be a multiple" +
+ " of the buffer cache page size.";
+ case STORAGE_BUFFERCACHE_MAXOPENFILES:
+ return "The maximum number of open files in the buffer cache";
+ case STORAGE_MEMORYCOMPONENT_GLOBALBUDGET:
+ return "The size of memory allocated to the memory components. The value should be a multiple " +
+ "of the memory component page size";
+ case STORAGE_MEMORYCOMPONENT_PAGESIZE:
+ return "The page size in bytes for pages allocated to memory components";
+ case STORAGE_MEMORYCOMPONENT_NUMPAGES:
+ return "The number of pages to allocate for a memory component. This budget is shared by all " +
+ "the memory components of the primary index and all its secondary indexes across all I/O " +
+ "devices on a node. Note: in-memory components usually has fill factor of 75% since " +
+ "the pages are 75% full and the remaining 25% is un-utilized";
+ case STORAGE_MEMORYCOMPONENT_NUMCOMPONENTS:
+ return "The number of memory components to be used per lsm index";
+ case STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES:
+ return "The number of pages to allocate for a metadata memory component";
+ case STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE:
+ return "The maximum acceptable false positive rate for bloom filters associated with LSM indexes";
+ default:
+ throw new IllegalStateException("NYI: " + this);
+ }
+ }
- private static final String STORAGE_MEMORYCOMPONENT_NUMCOMPONENTS_KEY = "storage.memorycomponent.numcomponents";
- private static final int STORAGE_MEMORYCOMPONENT_NUMCOMPONENTS_DEFAULT = 2; // 2 components
+ @Override
+ public IOptionType type() {
+ return interpreter;
+ }
- private static final String STORAGE_MEMORYCOMPONENT_GLOBALBUDGET_KEY = "storage.memorycomponent.globalbudget";
+ @Override
+ public Object defaultValue() {
+ return defaultValue;
+ }
- private static final String STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_KEY =
- "storage.lsm.bloomfilter.falsepositiverate";
- private static final double STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_DEFAULT = 0.01;
- private final long storageBufferCacheSizeDefault;
- private final int storageMemoryComponentNumPages;
- private final int storageMetadataMemoryComponentNumPages;
- private final long storageMemorycomponentGlobalbudgetDefault;
+ @Override
+ public String usageDefaultOverride(IApplicationConfig accessor, Function<IOption, String> optionPrinter) {
+ switch (this) {
+ case STORAGE_MEMORYCOMPONENT_NUMPAGES:
+ return "1/16th of the " + optionPrinter.apply(Option.STORAGE_MEMORYCOMPONENT_GLOBALBUDGET) +
+ " value";
+ case STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES:
+ return "1/64th of the " + optionPrinter.apply(Option.STORAGE_MEMORYCOMPONENT_GLOBALBUDGET) +
+ " value or 256, whichever is larger";
+ default:
+ return null;
+ }
+ }
+ }
public StorageProperties(PropertiesAccessor accessor) {
super(accessor);
-
- // Gets the -Xmx value for the JVM.
- long maxHeapSize = Runtime.getRuntime().maxMemory();
- // By default, uses 1/4 of the maximum heap size for read cache, i.e., disk buffer cache.
- storageBufferCacheSizeDefault = maxHeapSize / 4;
- // By default, uses 1/4 of the maximum heap size for the write buffer, i.e., globalbudget for memory components.
- storageMemorycomponentGlobalbudgetDefault = maxHeapSize / 4;
- // By default, uses 1/16 of the storageMemorycomponentGlobalbudgetDefault for the write buffer budget
- // for a dataset, including data and indexes.
- storageMemoryComponentNumPages = (int) (storageMemorycomponentGlobalbudgetDefault
- / (16 * getMemoryComponentPageSize()));
- // By default, uses the min of 1/64 of the storageMemorycomponentGlobalbudgetDefault and 256 pages
- // for the write buffer budget for a metadata dataset, including data and indexes.
- storageMetadataMemoryComponentNumPages = Math
- .min((int) (storageMemorycomponentGlobalbudgetDefault / (64 * getMemoryComponentPageSize())), 256);
}
- @PropertyKey(STORAGE_BUFFERCACHE_PAGESIZE_KEY)
public int getBufferCachePageSize() {
- return accessor.getProperty(STORAGE_BUFFERCACHE_PAGESIZE_KEY, STORAGE_BUFFERCACHE_PAGESIZE_DEFAULT,
- PropertyInterpreters.getIntegerBytePropertyInterpreter());
+ return accessor.getInt(Option.STORAGE_BUFFERCACHE_PAGESIZE);
}
- @PropertyKey(STORAGE_BUFFERCACHE_SIZE_KEY)
public long getBufferCacheSize() {
- return accessor.getProperty(STORAGE_BUFFERCACHE_SIZE_KEY, storageBufferCacheSizeDefault,
- PropertyInterpreters.getLongBytePropertyInterpreter());
+ return accessor.getLong(Option.STORAGE_BUFFERCACHE_SIZE);
+ }
+
+ public int getBufferCacheMaxOpenFiles() {
+ return accessor.getInt(Option.STORAGE_BUFFERCACHE_MAXOPENFILES);
+ }
+
+ public int getMemoryComponentPageSize() {
+ return accessor.getInt(Option.STORAGE_MEMORYCOMPONENT_PAGESIZE);
+ }
+
+ public int getMemoryComponentNumPages() {
+ return accessor.getInt(Option.STORAGE_MEMORYCOMPONENT_NUMPAGES);
+ }
+
+ public int getMetadataMemoryComponentNumPages() {
+ return accessor.getInt(Option.STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES);
+ }
+
+ public int getMemoryComponentsNum() {
+ return accessor.getInt(Option.STORAGE_MEMORYCOMPONENT_NUMCOMPONENTS);
+ }
+
+ public long getMemoryComponentGlobalBudget() {
+ return accessor.getLong(Option.STORAGE_MEMORYCOMPONENT_GLOBALBUDGET);
+ }
+
+ public double getBloomFilterFalsePositiveRate() {
+ return accessor.getDouble(Option.STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE);
}
public int getBufferCacheNumPages() {
return (int) (getBufferCacheSize() / (getBufferCachePageSize() + IBufferCache.RESERVED_HEADER_BYTES));
}
-
- @PropertyKey(STORAGE_BUFFERCACHE_MAXOPENFILES_KEY)
- public int getBufferCacheMaxOpenFiles() {
- return accessor.getProperty(STORAGE_BUFFERCACHE_MAXOPENFILES_KEY, STORAGE_BUFFERCACHE_MAXOPENFILES_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
- }
-
- @PropertyKey(STORAGE_MEMORYCOMPONENT_PAGESIZE_KEY)
- public int getMemoryComponentPageSize() {
- return accessor.getProperty(STORAGE_MEMORYCOMPONENT_PAGESIZE_KEY, STORAGE_MEMORYCOMPONENT_PAGESIZE_DEFAULT,
- PropertyInterpreters.getIntegerBytePropertyInterpreter());
- }
-
- @PropertyKey(STORAGE_MEMORYCOMPONENT_NUMPAGES_KEY)
- public int getMemoryComponentNumPages() {
- return accessor.getProperty(STORAGE_MEMORYCOMPONENT_NUMPAGES_KEY, storageMemoryComponentNumPages,
- PropertyInterpreters.getIntegerPropertyInterpreter());
- }
-
- @PropertyKey(STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES_KEY)
- public int getMetadataMemoryComponentNumPages() {
- return accessor.getProperty(STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES_KEY,
- storageMetadataMemoryComponentNumPages, PropertyInterpreters.getIntegerPropertyInterpreter());
- }
-
- @PropertyKey(STORAGE_MEMORYCOMPONENT_NUMCOMPONENTS_KEY)
- public int getMemoryComponentsNum() {
- return accessor.getProperty(STORAGE_MEMORYCOMPONENT_NUMCOMPONENTS_KEY,
- STORAGE_MEMORYCOMPONENT_NUMCOMPONENTS_DEFAULT, PropertyInterpreters.getIntegerPropertyInterpreter());
- }
-
- @PropertyKey(STORAGE_MEMORYCOMPONENT_GLOBALBUDGET_KEY)
- public long getMemoryComponentGlobalBudget() {
- return accessor.getProperty(STORAGE_MEMORYCOMPONENT_GLOBALBUDGET_KEY, storageMemorycomponentGlobalbudgetDefault,
- PropertyInterpreters.getLongBytePropertyInterpreter());
- }
-
- @PropertyKey(STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_KEY)
- public double getBloomFilterFalsePositiveRate() {
- return accessor.getProperty(STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_KEY,
- STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE_DEFAULT, PropertyInterpreters.getDoublePropertyInterpreter());
- }
}
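The Option enum above pairs each storage parameter with either a literal default or a Function<IApplicationConfig, T> that derives the default from other options (e.g. the memory-component page counts computed from the global budget). The following is a minimal, self-contained sketch of that pattern in isolation; ConfigView, SketchOption, and every other name here are hypothetical stand-ins, not the actual Hyracks IApplicationConfig plumbing.

    import java.util.function.Function;

    // Self-contained sketch; ConfigView is a hypothetical stand-in for IApplicationConfig.
    public class OptionDefaultSketch {

        interface ConfigView {
            long getLong(String iniName);
        }

        enum SketchOption {
            // Literal default: a 128 KB page size.
            MEMORYCOMPONENT_PAGESIZE(128L * 1024),
            // Derived default: 1/16th of the global budget, expressed in pages.
            MEMORYCOMPONENT_NUMPAGES((ConfigView cfg) ->
                    cfg.getLong("storage.memorycomponent.globalbudget")
                            / (16 * cfg.getLong("storage.memorycomponent.pagesize")));

            private final Object defaultValue; // a Long literal or a Function<ConfigView, Long>

            SketchOption(long literal) {
                this.defaultValue = literal;
            }

            SketchOption(Function<ConfigView, Long> derived) {
                this.defaultValue = derived;
            }

            @SuppressWarnings("unchecked")
            long resolveDefault(ConfigView cfg) {
                return defaultValue instanceof Function
                        ? ((Function<ConfigView, Long>) defaultValue).apply(cfg)
                        : (Long) defaultValue;
            }
        }

        public static void main(String[] args) {
            // A 512 MB global budget with 128 KB pages gives the 256-page default described above.
            ConfigView cfg = name -> name.endsWith("globalbudget") ? 512L * 1024 * 1024 : 128L * 1024;
            System.out.println(SketchOption.MEMORYCOMPONENT_NUMPAGES.resolveDefault(cfg)); // prints 256
        }
    }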
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java
index e4fd71d..73e8c4a 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java
@@ -18,50 +18,65 @@
*/
package org.apache.asterix.common.config;
-import static org.apache.hyracks.util.StorageUtil.StorageUnit.KILOBYTE;
-import static org.apache.hyracks.util.StorageUtil.StorageUnit.MEGABYTE;
+import static org.apache.hyracks.control.common.config.OptionTypes.*;
+import static org.apache.hyracks.util.StorageUtil.StorageUnit.*;
import java.util.Map;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.util.StorageUtil;
public class TransactionProperties extends AbstractProperties {
- private static final String TXN_LOG_BUFFER_NUMPAGES_KEY = "txn.log.buffer.numpages";
- private static final int TXN_LOG_BUFFER_NUMPAGES_DEFAULT = 8;
+ public enum Option implements IOption {
+ TXN_LOG_BUFFER_NUMPAGES(INTEGER, 8),
+ TXN_LOG_BUFFER_PAGESIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(128, KILOBYTE)),
+ TXN_LOG_PARTITIONSIZE(LONG_BYTE_UNIT, StorageUtil.getLongSizeInBytes(256L, MEGABYTE)),
+ TXN_LOG_CHECKPOINT_LSNTHRESHOLD(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(64, MEGABYTE)),
+ TXN_LOG_CHECKPOINT_POLLFREQUENCY(INTEGER, 120),
+ TXN_LOG_CHECKPOINT_HISTORY(INTEGER, 0),
+ TXN_LOCK_ESCALATIONTHRESHOLD(INTEGER, 1000),
+ TXN_LOCK_SHRINKTIMER(INTEGER, 5000),
+ TXN_LOCK_TIMEOUT_WAITTHRESHOLD(INTEGER, 60000),
+ TXN_LOCK_TIMEOUT_SWEEPTHRESHOLD(INTEGER, 10000),
+ TXN_COMMITPROFILER_REPORTINTERVAL(INTEGER, 5),
+ TXN_JOB_RECOVERY_MEMORYSIZE(LONG_BYTE_UNIT, StorageUtil.getLongSizeInBytes(64L, MEGABYTE));
- private static final String TXN_LOG_BUFFER_PAGESIZE_KEY = "txn.log.buffer.pagesize";
- private static final int TXN_LOG_BUFFER_PAGESIZE_DEFAULT = StorageUtil.getSizeInBytes(128, KILOBYTE);
+ private final IOptionType type;
+ private final Object defaultValue;
- public static final String TXN_LOG_PARTITIONSIZE_KEY = "txn.log.partitionsize";
- private static final long TXN_LOG_PARTITIONSIZE_DEFAULT = StorageUtil.getSizeInBytes(256L, MEGABYTE);
+ Option(IOptionType type, Object defaultValue) {
+ this.type = type;
+ this.defaultValue = defaultValue;
+ }
- private static final String TXN_LOG_CHECKPOINT_LSNTHRESHOLD_KEY = "txn.log.checkpoint.lsnthreshold";
- private static final int TXN_LOG_CHECKPOINT_LSNTHRESHOLD_DEFAULT = StorageUtil.getSizeInBytes(64, MEGABYTE);
+ @Override
+ public Section section() {
+ return Section.COMMON;
+ }
- public static final String TXN_LOG_CHECKPOINT_POLLFREQUENCY_KEY = "txn.log.checkpoint.pollfrequency";
- private static final int TXN_LOG_CHECKPOINT_POLLFREQUENCY_DEFAULT = 120; // 120s
+ @Override
+ public String description() {
+ // TODO(mblow): add missing descriptions
+ return null;
+ }
- private static final String TXN_LOG_CHECKPOINT_HISTORY_KEY = "txn.log.checkpoint.history";
- private static final int TXN_LOG_CHECKPOINT_HISTORY_DEFAULT = 0;
+ @Override
+ public IOptionType type() {
+ return type;
+ }
- private static final String TXN_LOCK_ESCALATIONTHRESHOLD_KEY = "txn.lock.escalationthreshold";
- private static final int TXN_LOCK_ESCALATIONTHRESHOLD_DEFAULT = 1000;
+ @Override
+ public Object defaultValue() {
+ return defaultValue;
+ }
+ }
- private static final String TXN_LOCK_SHRINKTIMER_KEY = "txn.lock.shrinktimer";
- private static final int TXN_LOCK_SHRINKTIMER_DEFAULT = 5000; // 5s
+ public static final String TXN_LOG_PARTITIONSIZE_KEY = Option.TXN_LOG_PARTITIONSIZE.ini();
- private static final String TXN_LOCK_TIMEOUT_WAITTHRESHOLD_KEY = "txn.lock.timeout.waitthreshold";
- private static final int TXN_LOCK_TIMEOUT_WAITTHRESHOLD_DEFAULT = 60000; // 60s
-
- private static final String TXN_LOCK_TIMEOUT_SWEEPTHRESHOLD_KEY = "txn.lock.timeout.sweepthreshold";
- private static final int TXN_LOCK_TIMEOUT_SWEEPTHRESHOLD_DEFAULT = 10000; // 10s
-
- private static final String TXN_COMMIT_PROFILER_REPORT_INTERVAL_KEY = "txn.commitprofiler.reportinterval";
- private static final int TXN_COMMIT_PROFILER_REPORT_INTERVAL_DEFAULT = 5; // 5 seconds
-
- private static final String TXN_JOB_RECOVERY_MEMORY_SIZE_KEY = "txn.job.recovery.memorysize";
- private static final long TXN_JOB_RECOVERY_MEMORY_SIZE_DEFAULT = StorageUtil.getSizeInBytes(64L, MEGABYTE);
+ public static final String TXN_LOG_CHECKPOINT_POLLFREQUENCY_KEY = Option.TXN_LOG_CHECKPOINT_POLLFREQUENCY.ini();
public TransactionProperties(PropertiesAccessor accessor) {
super(accessor);
@@ -75,75 +90,51 @@
return accessor.getTransactionLogDirs();
}
- @PropertyKey(TXN_LOG_BUFFER_NUMPAGES_KEY)
public int getLogBufferNumPages() {
- return accessor.getProperty(TXN_LOG_BUFFER_NUMPAGES_KEY, TXN_LOG_BUFFER_NUMPAGES_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.TXN_LOG_BUFFER_NUMPAGES);
}
- @PropertyKey(TXN_LOG_BUFFER_PAGESIZE_KEY)
public int getLogBufferPageSize() {
- return accessor.getProperty(TXN_LOG_BUFFER_PAGESIZE_KEY, TXN_LOG_BUFFER_PAGESIZE_DEFAULT,
- PropertyInterpreters.getIntegerBytePropertyInterpreter());
+ return accessor.getInt(Option.TXN_LOG_BUFFER_PAGESIZE);
}
- @PropertyKey(TXN_LOG_PARTITIONSIZE_KEY)
public long getLogPartitionSize() {
- return accessor.getProperty(TXN_LOG_PARTITIONSIZE_KEY, TXN_LOG_PARTITIONSIZE_DEFAULT,
- PropertyInterpreters.getLongBytePropertyInterpreter());
+ return accessor.getLong(Option.TXN_LOG_PARTITIONSIZE);
}
- @PropertyKey(TXN_LOG_CHECKPOINT_LSNTHRESHOLD_KEY)
public int getCheckpointLSNThreshold() {
- return accessor.getProperty(TXN_LOG_CHECKPOINT_LSNTHRESHOLD_KEY, TXN_LOG_CHECKPOINT_LSNTHRESHOLD_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.TXN_LOG_CHECKPOINT_LSNTHRESHOLD);
}
- @PropertyKey(TXN_LOG_CHECKPOINT_POLLFREQUENCY_KEY)
public int getCheckpointPollFrequency() {
- return accessor.getProperty(TXN_LOG_CHECKPOINT_POLLFREQUENCY_KEY, TXN_LOG_CHECKPOINT_POLLFREQUENCY_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.TXN_LOG_CHECKPOINT_POLLFREQUENCY);
}
- @PropertyKey(TXN_LOG_CHECKPOINT_HISTORY_KEY)
public int getCheckpointHistory() {
- return accessor.getProperty(TXN_LOG_CHECKPOINT_HISTORY_KEY, TXN_LOG_CHECKPOINT_HISTORY_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.TXN_LOG_CHECKPOINT_HISTORY);
}
- @PropertyKey(TXN_LOCK_ESCALATIONTHRESHOLD_KEY)
public int getEntityToDatasetLockEscalationThreshold() {
- return accessor.getProperty(TXN_LOCK_ESCALATIONTHRESHOLD_KEY, TXN_LOCK_ESCALATIONTHRESHOLD_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.TXN_LOCK_ESCALATIONTHRESHOLD);
}
- @PropertyKey(TXN_LOCK_SHRINKTIMER_KEY)
public int getLockManagerShrinkTimer() {
- return accessor.getProperty(TXN_LOCK_SHRINKTIMER_KEY, TXN_LOCK_SHRINKTIMER_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.TXN_LOCK_SHRINKTIMER);
}
- @PropertyKey(TXN_LOCK_TIMEOUT_WAITTHRESHOLD_KEY)
public int getTimeoutWaitThreshold() {
- return accessor.getProperty(TXN_LOCK_TIMEOUT_WAITTHRESHOLD_KEY, TXN_LOCK_TIMEOUT_WAITTHRESHOLD_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.TXN_LOCK_TIMEOUT_WAITTHRESHOLD);
}
- @PropertyKey(TXN_LOCK_TIMEOUT_SWEEPTHRESHOLD_KEY)
public int getTimeoutSweepThreshold() {
- return accessor.getProperty(TXN_LOCK_TIMEOUT_SWEEPTHRESHOLD_KEY, TXN_LOCK_TIMEOUT_SWEEPTHRESHOLD_DEFAULT,
- PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.TXN_LOCK_TIMEOUT_SWEEPTHRESHOLD);
}
- @PropertyKey(TXN_COMMIT_PROFILER_REPORT_INTERVAL_KEY)
public int getCommitProfilerReportInterval() {
- return accessor.getProperty(TXN_COMMIT_PROFILER_REPORT_INTERVAL_KEY,
- TXN_COMMIT_PROFILER_REPORT_INTERVAL_DEFAULT, PropertyInterpreters.getIntegerPropertyInterpreter());
+ return accessor.getInt(Option.TXN_COMMITPROFILER_REPORTINTERVAL);
}
- @PropertyKey(TXN_JOB_RECOVERY_MEMORY_SIZE_KEY)
public long getJobRecoveryMemorySize() {
- return accessor.getProperty(TXN_JOB_RECOVERY_MEMORY_SIZE_KEY, TXN_JOB_RECOVERY_MEMORY_SIZE_DEFAULT,
- PropertyInterpreters.getLongBytePropertyInterpreter());
+ return accessor.getLong(Option.TXN_JOB_RECOVERY_MEMORYSIZE);
}
}
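The TXN_LOG_PARTITIONSIZE_KEY and TXN_LOG_CHECKPOINT_POLLFREQUENCY_KEY constants above are now obtained from the options themselves via Option.xxx.ini(), which yields the ini parameter name (e.g. txn.log.partitionsize). The sketch below shows one plausible way such a name could be derived from the enum constant; this is an assumption about the convention, not the actual IOption.ini() implementation.

    import java.util.Locale;

    // Hypothetical sketch of the enum-name -> ini-key convention; not the real IOption.ini().
    public class IniNameSketch {

        enum TxnOption { TXN_LOG_PARTITIONSIZE, TXN_LOG_CHECKPOINT_POLLFREQUENCY }

        static String ini(Enum<?> option) {
            // TXN_LOG_PARTITIONSIZE -> txn.log.partitionsize
            return option.name().toLowerCase(Locale.ROOT).replace('_', '.');
        }

        public static void main(String[] args) {
            System.out.println(ini(TxnOption.TXN_LOG_PARTITIONSIZE));            // txn.log.partitionsize
            System.out.println(ini(TxnOption.TXN_LOG_CHECKPOINT_POLLFREQUENCY)); // txn.log.checkpoint.pollfrequency
        }
    }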
diff --git a/asterixdb/asterix-common/src/main/resources/schema/cluster.xsd b/asterixdb/asterix-common/src/main/resources/schema/cluster.xsd
index 098b4e7..8cd7b42 100644
--- a/asterixdb/asterix-common/src/main/resources/schema/cluster.xsd
+++ b/asterixdb/asterix-common/src/main/resources/schema/cluster.xsd
@@ -52,7 +52,6 @@
<xs:element name="heartbeat_period" type="xs:integer" />
<xs:element name="max_heartbeat_lapse_periods" type="xs:integer" />
<xs:element name="profile_dump_period" type="xs:integer" />
- <xs:element name="default_max_job_attempts" type="xs:integer" />
<xs:element name="job_history_size" type="xs:integer" />
<xs:element name="result_time_to_live" type="xs:long" />
<xs:element name="result_sweep_threshold" type="xs:long" />
@@ -185,7 +184,6 @@
<xs:element ref="cl:heartbeat_period" minOccurs="0" />
<xs:element ref="cl:max_heartbeat_lapse_periods" minOccurs="0" />
<xs:element ref="cl:profile_dump_period" minOccurs="0" />
- <xs:element ref="cl:default_max_job_attempts" minOccurs="0" />
<xs:element ref="cl:job_history_size" minOccurs="0" />
<xs:element ref="cl:result_time_to_live" minOccurs="0" />
<xs:element ref="cl:result_sweep_threshold" minOccurs="0" />
diff --git a/asterixdb/asterix-doc/src/site/markdown/ncservice.md b/asterixdb/asterix-doc/src/site/markdown/ncservice.md
index 3215183..67b1970 100644
--- a/asterixdb/asterix-doc/src/site/markdown/ncservice.md
+++ b/asterixdb/asterix-doc/src/site/markdown/ncservice.md
@@ -126,25 +126,25 @@
`cc.conf`:
[nc/red]
- txnlogdir=/tmp/asterix/red/txnlog
- coredumpdir=/tmp/asterix/red/coredump
+ txn.log.dir=/tmp/asterix/red/txnlog
+ core.dump.dir=/tmp/asterix/red/coredump
iodevices=/tmp/asterix/red
[nc/blue]
port=9091
- txnlogdir=/tmp/asterix/blue/txnlog
- coredumpdir=/tmp/asterix/blue/coredump
+ txn.log.dir=/tmp/asterix/blue/txnlog
+ core.dump.dir=/tmp/asterix/blue/coredump
iodevices=/tmp/asterix/blue
[nc]
app.class=org.apache.asterix.hyracks.bootstrap.NCApplicationEntryPoint
- storagedir=storage
+ storage.subdir=storage
address=127.0.0.1
command=asterixnc
[cc]
- cluster.address = 127.0.0.1
- http.port = 12345
+ address = 127.0.0.1
+ console.listen.port = 12345
This is the configuration file for the cluster and it contains information
that each `NCService` will use when starting the corresponding `NCDriver` as
@@ -257,10 +257,8 @@
| Parameter | Meaning | Default |
|----------|--------|-------|
-| instance.name | The name of the AsterixDB instance | "DEFAULT_INSTANCE" |
| max.wait.active.cluster | The max pending time (in seconds) for cluster startup. After the threshold, if the cluster still is not up and running, it is considered unavailable. | 60 |
-| metadata.callback.port | The port for metadata communication | 0 |
-| cluster.address | The binding IP address for the AsterixDB instance | N/A |
+| address | The binding IP address for the AsterixDB instance | N/A |
The following parameters are for slave processes, under "[nc]" sections.
@@ -269,13 +267,28 @@
|----------|--------|-------|
| address | The binding IP address for the slave process | N/A |
| command | The command for the slave process | N/A (for AsterixDB, it should be "asterixnc") |
-| coredumpdir | The path for core dump | N/A |
+| core.dump.dir | The path for core dump | N/A |
| iodevices | Comma separated directory paths for both storage files and temporary files | N/A |
| jvm.args | The JVM arguments | -Xmx1536m |
-| metadata.port | The metadata communication port on the metadata node. This parameter should only be present in the section of the metadata NC | 0 |
+| ncservice.port | The port on which the NCService for this NC is listening | 9090 |
+| txn.log.dir | The directory for transaction logs | N/A |
+
+
+The following parameters are configured under the "[common]" section.
+
+| Parameter | Meaning | Default |
+|----------|--------|-------|
+| instance.name | The name of the AsterixDB instance | "DEFAULT_INSTANCE" |
+| log.level | The logging level for master and slave processes | "INFO" |
+| compiler.framesize | The page size (in bytes) for computation | 32768 |
+| compiler.groupmemory | The memory budget (in bytes) for a group by operator instance in a partition | 33554432 |
+| compiler.joinmemory | The memory budget (in bytes) for a join operator instance in a partition | 33554432 |
+| compiler.sortmemory | The memory budget (in bytes) for a sort operator instance in a partition | 33554432 |
+| compiler.parallelism | The degree of parallelism for query execution. Zero means to use the storage parallelism as the query execution parallelism, while other integer values dictate the number of query execution parallel partitions. The system will fall back to using the number of all available CPU cores in the cluster as the degree of parallelism if the number set by a user is too large or too small. | 0 |
+| metadata.callback.port | The port for metadata communication | 0 |
+| metadata.listen.port | The metadata communication port on the metadata node. This parameter should only be present in the section of the metadata NC | 0 |
| metadata.registration.timeout.secs | The time out threshold (in seconds) for metadata node registration | 60 |
-| port | The port for the NCService that starts the slave process | N/A |
-| storagedir | The directory for storage files | N/A |
+| storage.subdir | The directory for storage files | N/A |
| storage.buffercache.maxopenfiles | The maximum number of open files for the buffer cache. Note that this is the parameter for the AsterixDB and setting the operating system parameter is still required. | 2147483647 |
| storage.buffercache.pagesize | The page size (in bytes) for the disk buffer cache (for reads) | 131072 |
| storage.buffercache.size | The overall budget (in bytes) of the disk buffer cache (for reads) | 536870912 |
@@ -285,7 +298,6 @@
| storage.memorycomponent.numpages | The number of pages for all memory components of a dataset, including those for secondary indexes | 256 |
| storage.memorycomponent.pagesize | The page size (in bytes) of memory components | 131072 |
| storage.metadata.memorycomponent.numpages | The number of pages for all memory components of a metadata dataset | 256 |
-| txnlogdir | The directory for transaction logs | N/A |
| txn.commitprofiler.reportinterval | The interval for reporting commit statistics | 5 |
| txn.job.recovery.memorysize | The memory budget (in bytes) used for recovery | 67108864 |
| txn.lock.timeout.sweepthreshold | Interval (in milliseconds) for checking lock timeout | 10000 |
@@ -296,13 +308,10 @@
| txn.log.checkpoint.lsnthreshold | The checkpoint threshold (in terms of LSNs (log sequence numbers) that have been written to the transaction log, i.e., the length of the transaction log) for transaction logs | 67108864 |
-The following parameter is for both master and slave processes, under the "[app]" section.
+The following parameters are for the optional NCService process configuration file, under the "[ncservice]" section.
| Parameter | Meaning | Default |
|----------|--------|-------|
-| log.level | The logging level for master and slave processes | "INFO" |
-| compiler.framesize | The page size (in bytes) for computation | 32768 |
-| compiler.groupmemory | The memory budget (in bytes) for a group by operator instance in a partition | 33554432 |
-| compiler.joinmemory | The memory budget (in bytes) for a join operator instance in a partition | 33554432 |
-| compiler.sortmemory | The memory budget (in bytes) for a sort operator instance in a partition | 33554432 |
-| compiler.parallelism | The degree of parallelism for query execution. Zero means to use the storage parallelism as the query execution parallelism, while other integer values dictate the number of query execution parallel partitions. The system will fall back to use the number of all available CPU cores in the cluster as the degree of parallelism if the number set by a user is too large or too small. | 0 |
+| address | The address the NCService listens on for commands from the CC | (all addresses) |
+| port | The port the NCService listens on for commands from the CC | 9090 |
+| logdir | Directory where NCService logs should be written ('-' indicates that output should go to stdout) | ${app.home}/logs (${user.home} if 'app.home' is not present in the NCService Java system properties) |
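
The tables above document the sectioned cc.conf layout ([cc], [nc], [nc/<name>], [common], plus the optional [ncservice] file). The sketch below simply groups keys by section for a tiny configuration in that shape, purely to illustrate the layout; it is not the parser AsterixDB or the NCService actually uses.

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Illustration only: groups keys by [section] for a cc.conf-shaped snippet.
    public class ConfSectionSketch {
        public static void main(String[] args) {
            String conf = "[nc/red]\n"
                    + "txn.log.dir=/tmp/asterix/red/txnlog\n"
                    + "[cc]\n"
                    + "address=127.0.0.1\n"
                    + "[common]\n"
                    + "log.level=INFO\n";

            Map<String, Map<String, String>> sections = new LinkedHashMap<>();
            String current = "";
            for (String raw : conf.split("\n")) {
                String line = raw.trim();
                if (line.isEmpty() || line.startsWith(";")) {
                    continue; // skip blank lines and ';' comments
                }
                if (line.startsWith("[") && line.endsWith("]")) {
                    current = line.substring(1, line.length() - 1);
                } else {
                    int eq = line.indexOf('=');
                    sections.computeIfAbsent(current, k -> new LinkedHashMap<>())
                            .put(line.substring(0, eq).trim(), line.substring(eq + 1).trim());
                }
            }
            // {nc/red={txn.log.dir=/tmp/asterix/red/txnlog}, cc={address=127.0.0.1}, common={log.level=INFO}}
            System.out.println(sections);
        }
    }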
diff --git a/asterixdb/asterix-docker/docker/supervisord.conf b/asterixdb/asterix-docker/docker/supervisord.conf
index 9f70dd1..b6ce1e0 100644
--- a/asterixdb/asterix-docker/docker/supervisord.conf
+++ b/asterixdb/asterix-docker/docker/supervisord.conf
@@ -19,21 +19,21 @@
nodaemon=true
[program:asterixcc]
-command=/asterixdb/bin/asterixcc -cluster-net-ip-address localhost -client-net-ip-address localhost
+command=/asterixdb/bin/asterixcc -address localhost -client-listen-address localhost
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
[program:asterixnc1]
-command=/asterixdb/bin/asterixnc -node-id nc1 -cc-host localhost -iodevices /asterixdb/nc1 -cluster-net-ip-address localhost -data-ip-address localhost -result-ip-address localhost --
+command=/asterixdb/bin/asterixnc -node-id nc1 -cluster-address localhost -iodevices /asterixdb/nc1 -address localhost -data-listen-address localhost -result-listen-address localhost
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
[program:asterixnc2]
-command=/asterixdb/bin/asterixnc -node-id nc2 -cc-host localhost -iodevices /asterixdb/nc2 -cluster-net-ip-address localhost -data-ip-address localhost -result-ip-address localhost --
+command=/asterixdb/bin/asterixnc -node-id nc2 -cluster-address localhost -iodevices /asterixdb/nc2 -address localhost -data-listen-address localhost -result-listen-address localhost
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
diff --git a/asterixdb/asterix-events/src/main/java/org/apache/asterix/event/service/AsterixEventServiceUtil.java b/asterixdb/asterix-events/src/main/java/org/apache/asterix/event/service/AsterixEventServiceUtil.java
index b6cc339..b48e311 100644
--- a/asterixdb/asterix-events/src/main/java/org/apache/asterix/event/service/AsterixEventServiceUtil.java
+++ b/asterixdb/asterix-events/src/main/java/org/apache/asterix/event/service/AsterixEventServiceUtil.java
@@ -23,7 +23,6 @@
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
-import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
@@ -157,10 +156,6 @@
clusterProperties.add(
new Property("PROFILE_DUMP_PERIOD", String.valueOf(cluster.getProfileDumpPeriod().intValue())));
}
- if (cluster.getDefaultMaxJobAttempts() != null) {
- clusterProperties.add(new Property("DEFAULT_MAX_JOB_ATTEMPTS",
- String.valueOf(cluster.getDefaultMaxJobAttempts().intValue())));
- }
if (cluster.getJobHistorySize() != null) {
clusterProperties
.add(new Property("JOB_HISTORY_SIZE", String.valueOf(cluster.getJobHistorySize().intValue())));
@@ -288,15 +283,13 @@
}
configuration.setStore(stores);
List<Coredump> coredump = new ArrayList<Coredump>();
- String coredumpDir = null;
List<TransactionLogDir> txnLogDirs = new ArrayList<TransactionLogDir>();
- String txnLogDir = null;
for (Node node : cluster.getNode()) {
- coredumpDir = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
+ String coredumpdir = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
coredump.add(new Coredump(asterixInstanceName + "_" + node.getId(),
- coredumpDir + File.separator + asterixInstanceName + "_" + node.getId()));
+ coredumpdir + File.separator + asterixInstanceName + "_" + node.getId()));
- txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
+ String txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir();
txnLogDirs.add(new TransactionLogDir(asterixInstanceName + "_" + node.getId(), txnLogDir));
}
configuration.setCoredump(coredump);
diff --git a/asterixdb/asterix-events/src/main/resources/events/cc_start/cc_start.sh b/asterixdb/asterix-events/src/main/resources/events/cc_start/cc_start.sh
index c33ba6c..3950547 100644
--- a/asterixdb/asterix-events/src/main/resources/events/cc_start/cc_start.sh
+++ b/asterixdb/asterix-events/src/main/resources/events/cc_start/cc_start.sh
@@ -18,14 +18,14 @@
# under the License.
if [ ! -d $LOG_DIR ];
-then
+then
mkdir -p $LOG_DIR
fi
-ccArgs='-client-net-ip-address '$CLIENT_NET_IP
-ccArgs=$ccArgs' -client-net-port '$CLIENT_NET_PORT
-ccArgs=$ccArgs' -cluster-net-ip-address '$CLUSTER_NET_IP
-ccArgs=$ccArgs' -cluster-net-port '$CLUSTER_NET_PORT
-ccArgs=$ccArgs' -http-port '$HTTP_PORT
+ccArgs='-client-listen-address '$CLIENT_NET_IP
+ccArgs=$ccArgs' -client-listen-port '$CLIENT_NET_PORT
+ccArgs=$ccArgs' -address '$CLUSTER_NET_IP
+ccArgs=$ccArgs' -cluster-listen-port '$CLUSTER_NET_PORT
+ccArgs=$ccArgs' -console-listen-port '$HTTP_PORT
if [ ! -z $HEARTBEAT_PERIOD ]
then
@@ -34,7 +34,7 @@
if [ ! -z $MAX_HEARTBEAT_LAPSE_PERIODS ]
then
-ccArgs=$ccArgs' -max-heartbeat-lapse-periods '$MAX_HEARTBEAT_LAPSE_PERIODS
+ccArgs=$ccArgs' -heartbeat-max-misses '$MAX_HEARTBEAT_LAPSE_PERIODS
fi
if [ ! -z $PROFILE_DUMP_PERIOD ]
@@ -42,11 +42,6 @@
ccArgs=$ccArgs' -profile-dump-period '$PROFILE_DUMP_PERIOD
fi
-if [ ! -z $DEFAULT_MAX_JOB_ATTEMPTS ]
-then
-ccArgs=$ccArgs' -default-max-job-attempts '$DEFAULT_MAX_JOB_ATTEMPTS
-fi
-
if [ ! -z $JOB_HISTORY_SIZE ]
then
ccArgs=$ccArgs' -job-history-size '$JOB_HISTORY_SIZE
@@ -54,7 +49,7 @@
if [ ! -z $RESULT_TIME_TO_LIVE ]
then
-ccArgs=$ccArgs' "-result-time-to-live '$RESULT_TIME_TO_LIVE
+ccArgs=$ccArgs' -result-ttl '$RESULT_TIME_TO_LIVE
fi
if [ ! -z $RESULT_SWEEP_THRESHOLD ]
@@ -64,14 +59,17 @@
if [ ! -z $CC_ROOT ]
then
-ccArgs=$ccArgs' -cc-root '$CC_ROOT
+ccArgs=$ccArgs' -root-dir '$CC_ROOT
fi
cd $WORKING_DIR
-DATE=`date`
+# TODO(mblow): ensure logging configuration is correct here...
+# export JAVA_OPTS="$JAVA_OPTS -Dlog4j.configuration=file:$WORKING_DIR/conf/log4j.properties"
+DATE=`date`
cat <<EOF >> $LOG_DIR/cc.log
--------------------------------------------------------------------------------
LOG START: $DATE
+JAVA_OPTS: $JAVA_OPTS
--------------------------------------------------------------------------------
EOF
-$ASTERIX_HOME/bin/asterixcc echo $ccArgs >> $LOG_DIR/cc.log 2>&1
\ No newline at end of file
+$ASTERIX_HOME/bin/asterixcc $ccArgs >> $LOG_DIR/cc.log 2>&1
\ No newline at end of file
diff --git a/asterixdb/asterix-events/src/main/resources/events/node_join/nc_join.sh b/asterixdb/asterix-events/src/main/resources/events/node_join/nc_join.sh
index 7f86379..626739a 100644
--- a/asterixdb/asterix-events/src/main/resources/events/node_join/nc_join.sh
+++ b/asterixdb/asterix-events/src/main/resources/events/node_join/nc_join.sh
@@ -21,8 +21,8 @@
NC_ID=$2
IO_DEVICES=$3
INITIAL_RUN_FLAG=$4
-if [ ! -d $LOG_DIR ];
-then
+if [ ! -d $LOG_DIR ];
+then
mkdir -p $LOG_DIR
fi
@@ -32,6 +32,7 @@
cat <<EOF >> $LOG_DIR/${NC_ID}.log
--------------------------------------------------------------------------------
LOG START: $DATE
+JAVA_OPTS: $JAVA_OPTS
--------------------------------------------------------------------------------
EOF
-$ASTERIX_HOME/bin/asterixnc -node-id $NC_ID -cc-host $CC_HOST -cc-port $CLUSTER_NET_PORT -cluster-net-ip-address $IP_LOCATION -data-ip-address $IP_LOCATION -iodevices $IO_DEVICES -result-ip-address $IP_LOCATION -- $INITIAL_RUN_FLAG >> $LOG_DIR/${NC_ID}.log 2>&1
+$ASTERIX_HOME/bin/asterixnc -node-id $NC_ID -cluster-address $CC_HOST -cluster-port $CLUSTER_NET_PORT -address $IP_LOCATION -data-listen-address $IP_LOCATION -iodevices $IO_DEVICES -result-listen-address $IP_LOCATION $INITIAL_RUN_FLAG >> $LOG_DIR/${NC_ID}.log 2>&1
diff --git a/asterixdb/asterix-events/src/main/resources/events/node_restart/nc_restart.sh b/asterixdb/asterix-events/src/main/resources/events/node_restart/nc_restart.sh
index b3b5303..5ed2f7e 100644
--- a/asterixdb/asterix-events/src/main/resources/events/node_restart/nc_restart.sh
+++ b/asterixdb/asterix-events/src/main/resources/events/node_restart/nc_restart.sh
@@ -23,18 +23,18 @@
if [ $NC_ID == 'ANY' ]
then
- NC_ID="."
- PARENT_ID=`ps -ej | tr -s " " | grep nc_join | grep -v grep | grep -v ssh | cut -d " " -f2 | head -n 1`
+ NC_ID="."
+ PARENT_ID=`ps -ej | tr -s " " | grep nc_join | grep -v grep | grep -v ssh | cut -d " " -f2 | head -n 1`
PARENT_PROCESS_ENTRY=`ps -ef | grep $PARENT_ID | grep -v grep | head -n 1`
NC_ID=`echo ${PARENT_PROCESS_ENTRY##* }`
echo "NCid is $NC_ID" >> ~/try.txt
-else
- PARENT_ID=`ps -ej | tr -s " " | grep nc_join | grep -v grep | grep -v ssh | grep $NC_ID | cut -d " " -f2 | head -n 1`
-fi
+else
+ PARENT_ID=`ps -ej | tr -s " " | grep nc_join | grep -v grep | grep -v ssh | grep $NC_ID | cut -d " " -f2 | head -n 1`
+fi
PID=`ps -ej | tr -s " " | grep hyracks | grep -v grep | grep -v nc_join | grep $PARENT_ID | cut -d " " -f2 | head -n 1`
kill -9 $PID
sleep $3
-$HYRACKS_HOME/hyracks-server/target/hyracks-server-0.2.2-SNAPSHOT-binary-assembly/bin/hyracksnc -node-id $NC_ID -cc-host $CC_HOST -cc-port 1099 -cluster-net-ip-address $IP_LOCATION -data-ip-address $IP_LOCATION
+$HYRACKS_HOME/hyracks-server/target/hyracks-server-0.2.2-SNAPSHOT-binary-assembly/bin/hyracksnc -node-id $NC_ID -cluster-address $CC_HOST -cluster-port 1099 -address $IP_LOCATION -data-listen-address $IP_LOCATION
diff --git a/asterixdb/asterix-installer/pom.xml b/asterixdb/asterix-installer/pom.xml
index df36c80..8b200bb 100644
--- a/asterixdb/asterix-installer/pom.xml
+++ b/asterixdb/asterix-installer/pom.xml
@@ -260,7 +260,7 @@
<artifactId>maven-dependency-plugin</artifactId>
<version>2.10</version>
<configuration>
- <usedDependencies>
+ <usedDependencies combine.children="append">
<usedDependency>org.apache.asterix:asterix-external-data</usedDependency>
<usedDependency>org.apache.asterix:asterix-server</usedDependency>
<usedDependency>org.apache.hadoop:hadoop-common</usedDependency>
@@ -478,5 +478,10 @@
<artifactId>hadoop-hdfs</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.hyracks</groupId>
+ <artifactId>hyracks-util</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-installer/src/main/resources/clusters/local/local.xml b/asterixdb/asterix-installer/src/main/resources/clusters/local/local.xml
index bd3bde0..f2afe5e 100644
--- a/asterixdb/asterix-installer/src/main/resources/clusters/local/local.xml
+++ b/asterixdb/asterix-installer/src/main/resources/clusters/local/local.xml
@@ -32,8 +32,6 @@
<max_heartbeat_lapse_periods>5</max_heartbeat_lapse_periods>
<!-- Sets the time duration between two profile dumps from each node controller in milliseconds. 0 to disable. (default: 0) -->
<profile_dump_period>0</profile_dump_period>
- <!-- Sets the default number of job attempts allowed if not specified in the job specification. (default: 5) -->
- <default_max_job_attempts>5</default_max_job_attempts>
<!-- Limits the number of historical jobs remembered by the system to the specified value. (default: 10) -->
<job_history_size>10</job_history_size>
<!-- Limits the amount of time results for asynchronous jobs should be retained by the system in milliseconds. (default: 24 hours) -->
diff --git a/asterixdb/asterix-installer/src/main/resources/clusters/local/local_chained_declustering_rep.xml b/asterixdb/asterix-installer/src/main/resources/clusters/local/local_chained_declustering_rep.xml
index 954a311..57d04c7 100644
--- a/asterixdb/asterix-installer/src/main/resources/clusters/local/local_chained_declustering_rep.xml
+++ b/asterixdb/asterix-installer/src/main/resources/clusters/local/local_chained_declustering_rep.xml
@@ -32,8 +32,6 @@
<max_heartbeat_lapse_periods>5</max_heartbeat_lapse_periods>
<!-- Sets the time duration between two profile dumps from each node controller in milliseconds. 0 to disable. (default: 0) -->
<profile_dump_period>0</profile_dump_period>
- <!-- Sets the default number of job attempts allowed if not specified in the job specification. (default: 5) -->
- <default_max_job_attempts>5</default_max_job_attempts>
<!-- Limits the number of historical jobs remembered by the system to the specified value. (default: 10) -->
<job_history_size>10</job_history_size>
<!-- Limits the amount of time results for asynchronous jobs should be retained by the system in milliseconds. (default: 24 hours) -->
diff --git a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AbstractExecutionIT.java b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AbstractExecutionIT.java
index 03e4f75..9d0a1db 100644
--- a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AbstractExecutionIT.java
+++ b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AbstractExecutionIT.java
@@ -31,6 +31,7 @@
import org.apache.asterix.testframework.xml.TestCase.CompilationUnit;
import org.apache.asterix.testframework.xml.TestGroup;
import org.apache.commons.lang3.StringUtils;
+import org.apache.hyracks.util.file.FileUtil;
import org.codehaus.plexus.util.FileUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -62,7 +63,7 @@
private static final List<String> badTestCases = new ArrayList<>();
private static String reportPath =
- new File(StringUtils.join(new String[] { "target", "failsafe-reports" }, File.separator)).getAbsolutePath();
+ new File(FileUtil.joinPath("target", "failsafe-reports")).getAbsolutePath();
@Rule
public TestRule retainLogs = new RetainLogsRule(
@@ -100,14 +101,14 @@
AsterixLifecycleIT.restartInstance();
FileUtils.copyDirectoryStructure(
- new File(StringUtils.join(new String[] { "..", "asterix-app", "data" }, File.separator)),
+ new File(FileUtil.joinPath("..", "asterix-app", "data")),
new File(AsterixInstallerIntegrationUtil.getManagixHome() + "/clusters/local/working_dir/data"));
FileUtils.copyDirectoryStructure(
- new File(StringUtils.join(new String[] { "..", "asterix-app", "target", "data" }, File.separator)),
+ new File(FileUtil.joinPath("..", "asterix-app", "target", "data")),
new File(AsterixInstallerIntegrationUtil.getManagixHome() + "/clusters/local/working_dir/target/data"));
- FileUtils.copyDirectoryStructure(new File(StringUtils.join(new String[] { "target", "data" }, File.separator)),
+ FileUtils.copyDirectoryStructure(new File(FileUtil.joinPath("target", "data")),
new File(AsterixInstallerIntegrationUtil.getManagixHome()
+ "/clusters/local/working_dir/target/data/csv"));
@@ -116,7 +117,7 @@
System.setProperty(ExternalDataConstants.NODE_RESOLVER_FACTORY_PROPERTY,
IdentitiyResolverFactory.class.getName());
- reportPath = new File(StringUtils.join(new String[] { "target", "failsafe-reports" }, File.separator))
+ reportPath = new File(FileUtil.joinPath("target", "failsafe-reports"))
.getAbsolutePath();
}
diff --git a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixExternalLibraryIT.java b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixExternalLibraryIT.java
index b3a5dcc..ac5beaf 100644
--- a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixExternalLibraryIT.java
+++ b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixExternalLibraryIT.java
@@ -26,7 +26,7 @@
import org.apache.asterix.test.base.RetainLogsRule;
import org.apache.asterix.test.common.TestExecutor;
import org.apache.asterix.testframework.context.TestCaseContext;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.hyracks.util.file.FileUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
@@ -44,7 +44,7 @@
private static final Logger LOGGER = Logger.getLogger(AsterixExternalLibraryIT.class.getName());
private static List<TestCaseContext> testCaseCollection;
private static String reportPath =
- new File(StringUtils.join(new String[] { "target", "failsafe-reports" }, File.separator)).getAbsolutePath();
+ new File(FileUtil.joinPath("target", "failsafe-reports")).getAbsolutePath();
private final TestExecutor testExecutor = new TestExecutor();
diff --git a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixRestartIT.java b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixRestartIT.java
index 7cca5cc..ce4de7c 100644
--- a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixRestartIT.java
+++ b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/AsterixRestartIT.java
@@ -28,7 +28,7 @@
import org.apache.asterix.test.base.RetainLogsRule;
import org.apache.asterix.test.common.TestExecutor;
import org.apache.asterix.testframework.context.TestCaseContext;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.hyracks.util.file.FileUtil;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
@@ -48,8 +48,7 @@
private static final String PATH_BASE = "src/test/resources/integrationts/restart/";
private static final String PATH_ACTUAL = "target" + File.separator + "ittest" + File.separator;
private static final Logger LOGGER = Logger.getLogger(AsterixRestartIT.class.getName());
- private static String reportPath = new File(
- StringUtils.join(new String[] { "target", "failsafe-reports" }, File.separator)).getAbsolutePath();
+ private static String reportPath = new File(FileUtil.joinPath("target", "failsafe-reports")).getAbsolutePath();
private final TestExecutor testExecutor = new TestExecutor();
private TestCaseContext tcCtx;
diff --git a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/ClusterExecutionIT.java b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/ClusterExecutionIT.java
index 9fb1e01..446e5e3 100644
--- a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/ClusterExecutionIT.java
+++ b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/ClusterExecutionIT.java
@@ -25,6 +25,7 @@
import org.apache.asterix.test.runtime.HDFSCluster;
import org.apache.asterix.testframework.context.TestCaseContext;
import org.apache.commons.lang3.StringUtils;
+import org.apache.hyracks.util.file.FileUtil;
import org.codehaus.plexus.util.FileUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -58,7 +59,7 @@
AsterixClusterLifeCycleIT.setUp();
FileUtils.copyDirectoryStructure(
- new File(StringUtils.join(new String[] { "..", "asterix-app", "data" }, File.separator)),
+ new File(FileUtil.joinPath("..", "asterix-app", "data")),
new File(StringUtils.join(
new String[] { "src", "test", "resources", "clusterts", "managix-working", "data" },
File.separator)));
diff --git a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/ReplicationIT.java b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/ReplicationIT.java
index bac93e4..44ce7f8 100644
--- a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/ReplicationIT.java
+++ b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/test/ReplicationIT.java
@@ -28,7 +28,7 @@
import org.apache.asterix.test.base.RetainLogsRule;
import org.apache.asterix.test.common.TestExecutor;
import org.apache.asterix.testframework.context.TestCaseContext;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.hyracks.util.file.FileUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -41,11 +41,12 @@
@RunWith(Parameterized.class)
public class ReplicationIT {
- private static final String PATH_BASE = "src/test/resources/integrationts/replication/";
- private static final String PATH_ACTUAL = "target" + File.separator + "ittest" + File.separator;
+ private static final String PATH_BASE = FileUtil.joinPath("src", "test", "resources", "integrationts",
+ "replication");
+ private static final String PATH_ACTUAL = FileUtil.joinPath("target", "ittest");
private static final Logger LOGGER = Logger.getLogger(ReplicationIT.class.getName());
private static String reportPath = new File(
- StringUtils.join(new String[] { "target", "failsafe-reports" }, File.separator)).getAbsolutePath();
+ FileUtil.joinPath("target", "failsafe-reports")).getAbsolutePath();
private final TestExecutor testExecutor = new TestExecutor();
private TestCaseContext tcCtx;
@@ -67,9 +68,8 @@
pb = new ProcessBuilder();
env = pb.environment();
asterixInstallerPath = new File(System.getProperty("user.dir"));
- scriptHomePath = asterixInstallerPath + File.separator + "src" + File.separator + "test" + File.separator
- + "resources" + File.separator + "integrationts" + File.separator + "replication" + File.separator
- + "scripts";
+ scriptHomePath = FileUtil.joinPath(asterixInstallerPath.getPath(), "src", "test", "resources",
+ "integrationts", "replication", "scripts");
env.put("SCRIPT_HOME", scriptHomePath);
} catch (Throwable th) {
th.printStackTrace();
@@ -81,7 +81,7 @@
public void before() throws Exception {
LOGGER.info("Creating new instance...");
AsterixInstallerIntegrationUtil.init(AsterixInstallerIntegrationUtil.LOCAL_CLUSTER_CHAINED_DECLUSTERING_REP_PATH);
- LOGGER.info("Instacne created.");
+ LOGGER.info("Instance created.");
AsterixInstallerIntegrationUtil.transformIntoRequiredState(State.ACTIVE);
LOGGER.info("Instance is in ACTIVE state.");
}
diff --git a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/RecoveryIT.java b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/RecoveryIT.java
index 804848e..a0612cc 100644
--- a/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/RecoveryIT.java
+++ b/asterixdb/asterix-installer/src/test/java/org/apache/asterix/installer/transaction/RecoveryIT.java
@@ -106,7 +106,7 @@
HDFSCluster.getInstance().cleanup();
}
- @Parameters
+ @Parameters(name = "RecoveryIT {index}: {0}")
public static Collection<Object[]> tests() throws Exception {
Collection<Object[]> testArgs = new ArrayList<Object[]>();
TestCaseContext.Builder b = new TestCaseContext.Builder();
diff --git a/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/pom.xml b/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/pom.xml
index 0064565..c1abade 100644
--- a/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/pom.xml
+++ b/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/pom.xml
@@ -97,7 +97,7 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<configuration>
- <usedDependencies>
+ <usedDependencies combine.children="append">
<usedDependency>org.apache.maven:maven-core</usedDependency>
<usedDependency>org.apache.maven:maven-compat</usedDependency>
</usedDependencies>
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
index 044707a..afc1dfd 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
@@ -95,7 +95,7 @@
private final IReplicationManager replicationManager;
private final ReplicationProperties replicationProperties;
private final IAppRuntimeContextProvider appContextProvider;
- private static final int INTIAL_BUFFER_SIZE = StorageUtil.getSizeInBytes(4, StorageUnit.KILOBYTE);
+ private static final int INTIAL_BUFFER_SIZE = StorageUtil.getIntSizeInBytes(4, StorageUnit.KILOBYTE);
private final LinkedBlockingQueue<LSMComponentLSNSyncTask> lsmComponentRemoteLSN2LocalLSNMappingTaskQ;
private final LinkedBlockingQueue<LogRecord> pendingNotificationRemoteLogsQ;
private final Map<String, LSMComponentProperties> lsmComponentId2PropertiesMap;
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
index 72cc7d1..447021c 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
@@ -117,7 +117,7 @@
private final AtomicBoolean replicationSuspended;
private AtomicBoolean terminateJobsReplication;
private AtomicBoolean jobsReplicationSuspended;
- private static final int INITIAL_BUFFER_SIZE = StorageUtil.getSizeInBytes(4, StorageUnit.KILOBYTE);
+ private static final int INITIAL_BUFFER_SIZE = StorageUtil.getIntSizeInBytes(4, StorageUnit.KILOBYTE);
private final Set<String> shuttingDownReplicaIds;
//replication threads
private ReplicationJobsProccessor replicationJobsProcessor;
diff --git a/asterixdb/asterix-runtime/pom.xml b/asterixdb/asterix-runtime/pom.xml
index 6458fb0..df8f840 100644
--- a/asterixdb/asterix-runtime/pom.xml
+++ b/asterixdb/asterix-runtime/pom.xml
@@ -126,10 +126,13 @@
<artifactId>hyracks-storage-common</artifactId>
</dependency>
<dependency>
+ <groupId>org.apache.hyracks</groupId>
+ <artifactId>hyracks-control-common</artifactId>
+ </dependency>
+ <dependency>
<groupId>org.apache.asterix</groupId>
<artifactId>asterix-transactions</artifactId>
<version>${project.version}</version>
- <scope>compile</scope>
</dependency>
<dependency>
<groupId>org.ow2.asm</groupId>
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/AppContextInfo.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/AppContextInfo.java
index 6193931..9bcfea2 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/AppContextInfo.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/AppContextInfo.java
@@ -18,8 +18,24 @@
*/
package org.apache.asterix.runtime.utils;
+import java.io.IOException;
+import java.util.function.Supplier;
+import java.util.logging.Logger;
+
import org.apache.asterix.common.cluster.IGlobalRecoveryManager;
-import org.apache.asterix.common.config.*;
+import org.apache.asterix.common.config.BuildProperties;
+import org.apache.asterix.common.config.CompilerProperties;
+import org.apache.asterix.common.config.ExtensionProperties;
+import org.apache.asterix.common.config.ExternalProperties;
+import org.apache.asterix.common.config.FeedProperties;
+import org.apache.asterix.common.config.IPropertiesProvider;
+import org.apache.asterix.common.config.MessagingProperties;
+import org.apache.asterix.common.config.MetadataProperties;
+import org.apache.asterix.common.config.NodeProperties;
+import org.apache.asterix.common.config.PropertiesAccessor;
+import org.apache.asterix.common.config.ReplicationProperties;
+import org.apache.asterix.common.config.StorageProperties;
+import org.apache.asterix.common.config.TransactionProperties;
import org.apache.asterix.common.dataflow.IApplicationContextInfo;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.library.ILibraryManager;
@@ -31,10 +47,6 @@
import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
import org.apache.hyracks.storage.common.IStorageManager;
-import java.io.IOException;
-import java.util.function.Supplier;
-import java.util.logging.Logger;
-
/*
* Acts as an holder class for IndexRegistryProvider, AsterixStorageManager
* instances that are accessed from the NCs. In addition an instance of ICCApplicationContext
@@ -57,6 +69,7 @@
private ReplicationProperties replicationProperties;
private ExtensionProperties extensionProperties;
private MessagingProperties messagingProperties;
+ private NodeProperties nodeProperties;
private Supplier<IMetadataBootstrap> metadataBootstrapSupplier;
private IHyracksClientConnection hcc;
private Object extensionManager;
@@ -94,6 +107,7 @@
INSTANCE.hcc = hcc;
INSTANCE.buildProperties = new BuildProperties(propertiesAccessor);
INSTANCE.messagingProperties = new MessagingProperties(propertiesAccessor);
+ INSTANCE.nodeProperties = new NodeProperties(propertiesAccessor);
INSTANCE.metadataBootstrapSupplier = metadataBootstrapSupplier;
INSTANCE.globalRecoveryManager = globalRecoveryManager;
@@ -191,6 +205,11 @@
return messagingProperties;
}
+ @Override
+ public NodeProperties getNodeProperties() {
+ return nodeProperties;
+ }
+
public IResourceIdManager getResourceIdManager() {
return resourceIdManager;
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
index d975a98..49cbc54 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
@@ -37,12 +37,14 @@
import org.apache.asterix.event.schema.cluster.Cluster;
import org.apache.asterix.event.schema.cluster.Node;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.HyracksException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.hyracks.control.common.controllers.NCConfig;
/**
* A holder class for properties related to the Asterix cluster.
@@ -57,8 +59,7 @@
private static final Logger LOGGER = Logger.getLogger(ClusterStateManager.class.getName());
public static final ClusterStateManager INSTANCE = new ClusterStateManager();
- private static final String IO_DEVICES = "iodevices";
- private final Map<String, Map<String, String>> activeNcConfiguration = new HashMap<>();
+ private final Map<String, Map<IOption, Object>> activeNcConfiguration = new HashMap<>();
private final Cluster cluster;
private ClusterState state = ClusterState.UNUSABLE;
@@ -95,7 +96,7 @@
ftStrategy.notifyNodeFailure(nodeId);
}
- public synchronized void addNCConfiguration(String nodeId, Map<String, String> configuration)
+ public synchronized void addNCConfiguration(String nodeId, Map<IOption, Object> configuration)
throws HyracksException {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Registering configuration parameters for node id " + nodeId);
@@ -177,7 +178,7 @@
* @return a list of IO devices.
*/
public synchronized String[] getIODevices(String nodeId) {
- Map<String, String> ncConfig = activeNcConfiguration.get(nodeId);
+ Map<IOption, Object> ncConfig = activeNcConfiguration.get(nodeId);
if (ncConfig == null) {
if (LOGGER.isLoggable(Level.WARNING)) {
LOGGER.warning("Configuration parameters for nodeId " + nodeId
@@ -185,7 +186,7 @@
}
return new String[0];
}
- return ncConfig.get(IO_DEVICES).split(",");
+ return (String[]) ncConfig.get(NCConfig.Option.IODEVICES);
}
@Override
@@ -312,7 +313,7 @@
}
@Override
- public Map<String, Map<String, String>> getActiveNcConfiguration() {
+ public Map<String, Map<IOption, Object>> getActiveNcConfiguration() {
return Collections.unmodifiableMap(activeNcConfiguration);
}
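The ClusterStateManager change above replaces the stringly-typed per-NC configuration maps with Map<IOption, Object>, so values come back already typed (the iodevices entry is a String[] rather than a comma-separated string that callers must split). A minimal sketch of that lookup pattern follows, using a hypothetical option enum rather than the real NCConfig.Option.

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical sketch of a typed option map; NodeOpt stands in for NCConfig.Option.
    public class TypedConfigMapSketch {
        enum NodeOpt { IODEVICES, NCSERVICE_PORT }

        public static void main(String[] args) {
            Map<NodeOpt, Object> ncConfig = new HashMap<>();
            ncConfig.put(NodeOpt.IODEVICES, new String[] { "/tmp/asterix/red" });
            ncConfig.put(NodeOpt.NCSERVICE_PORT, 9091);

            // Values are stored pre-parsed, so consumers cast instead of re-splitting strings.
            String[] ioDevices = (String[]) ncConfig.get(NodeOpt.IODEVICES);
            int port = (Integer) ncConfig.get(NodeOpt.NCSERVICE_PORT);
            System.out.println(ioDevices[0] + " " + port);
        }
    }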
diff --git a/asterixdb/asterix-server/pom.xml b/asterixdb/asterix-server/pom.xml
index 019ad04..eef2f35 100644
--- a/asterixdb/asterix-server/pom.xml
+++ b/asterixdb/asterix-server/pom.xml
@@ -239,7 +239,7 @@
<name>asterixcc</name>
<mainClass>org.apache.hyracks.control.cc.CCDriver</mainClass>
<commandLineArguments>
- <commandLineArgument>-app-cc-main-class</commandLineArgument>
+ <commandLineArgument>-app-class</commandLineArgument>
<commandLineArgument>org.apache.asterix.hyracks.bootstrap.CCApplicationEntryPoint</commandLineArgument>
</commandLineArguments>
</program>
@@ -250,7 +250,7 @@
<name>asterixnc</name>
<mainClass>org.apache.hyracks.control.nc.NCDriver</mainClass>
<commandLineArguments>
- <commandLineArgument>-app-nc-main-class</commandLineArgument>
+ <commandLineArgument>-app-class</commandLineArgument>
<commandLineArgument>org.apache.asterix.hyracks.bootstrap.NCApplicationEntryPoint</commandLineArgument>
</commandLineArguments>
</program>
@@ -270,7 +270,7 @@
<platform>booter-windows</platform>
</platforms>
<commandLineArguments>
- <commandLineArgument>-app-cc-main-class</commandLineArgument>
+ <commandLineArgument>-app-class</commandLineArgument>
<commandLineArgument>org.apache.asterix.hyracks.bootstrap.CCApplicationEntryPoint</commandLineArgument>
</commandLineArguments>
</daemon>
@@ -281,7 +281,7 @@
<platform>booter-windows</platform>
</platforms>
<commandLineArguments>
- <commandLineArgument>-app-nc-main-class</commandLineArgument>
+ <commandLineArgument>-app-class</commandLineArgument>
<commandLineArgument>org.apache.asterix.hyracks.bootstrap.NCApplicationEntryPoint</commandLineArgument>
</commandLineArguments>
</daemon>
@@ -407,10 +407,10 @@
<artifactId>maven-dependency-plugin</artifactId>
<version>2.10</version>
<configuration>
- <ignoredDependencies>
+ <ignoredDependencies combine.children="append">
<ignoredDependency>org.apache.asterix:asterix-opt-bom:*</ignoredDependency>
</ignoredDependencies>
- <usedDependencies>
+ <usedDependencies combine.children="append">
<usedDependency>org.apache.asterix:asterix-app</usedDependency>
<usedDependency>org.apache.asterix:asterix-client-helper</usedDependency>
<usedDependency>org.apache.hadoop:hadoop-minicluster</usedDependency>
@@ -548,5 +548,9 @@
<artifactId>hadoop-minicluster</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.hyracks</groupId>
+ <artifactId>hyracks-util</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/asterixdb/asterix-server/src/main/opt/ansible/conf/cc.conf b/asterixdb/asterix-server/src/main/opt/ansible/conf/cc.conf
index ff95344..4887732 100644
--- a/asterixdb/asterix-server/src/main/opt/ansible/conf/cc.conf
+++ b/asterixdb/asterix-server/src/main/opt/ansible/conf/cc.conf
@@ -19,6 +19,6 @@
log.level=INFO
[nc]
-txnlogdir=txnlog
+txn.log.dir=txnlog
iodevices=iodevice
command=asterixnc
diff --git a/asterixdb/asterix-server/src/main/opt/ansible/yaml/gen_conf.yml b/asterixdb/asterix-server/src/main/opt/ansible/yaml/gen_conf.yml
index f1be8a1..f1409ae 100644
--- a/asterixdb/asterix-server/src/main/opt/ansible/yaml/gen_conf.yml
+++ b/asterixdb/asterix-server/src/main/opt/ansible/yaml/gen_conf.yml
@@ -33,5 +33,5 @@
loop_var: node
- name: Populate CC to the cluster configuration file "{{ ccconf }}"
- shell: printf "\n[cc]\ncluster.address={{ groups['cc'][0] }}\n\n" >> "{{ ccconf }}"
+ shell: printf "\n[cc]\naddress={{ groups['cc'][0] }}\n\n" >> "{{ ccconf }}"
diff --git a/asterixdb/asterix-server/src/main/opt/aws/yaml/aws_start.yml b/asterixdb/asterix-server/src/main/opt/aws/yaml/aws_start.yml
index e0abb12..802abcd 100644
--- a/asterixdb/asterix-server/src/main/opt/aws/yaml/aws_start.yml
+++ b/asterixdb/asterix-server/src/main/opt/aws/yaml/aws_start.yml
@@ -72,7 +72,7 @@
shell: printf "\n[all:vars]\nansible_ssh_user={{ user }}\n" >> "{{ inventory }}"
- name: Populate CC to the cluster configuration file "{{ ccconf }}"
- shell: printf "[cc]\ncluster.address={{ cc.private_ip }}\n\n" > "{{ ccconf }}"
+ shell: printf "[cc]\naddress={{ cc.private_ip }}\n\n" > "{{ ccconf }}"
- name: Populate NCs to the cluster configuration file
shell: printf "[nc/1]\naddress={{ cc.private_ip }}\n\n" >> "{{ ccconf }}"
@@ -93,7 +93,7 @@
loop_var: node
- name: Populate the NC txnlogdir to the cluster configuration file {{ ccconf }}
- shell: printf "[nc]\ntxnlogdir={{ home_dir }}/txnlog\n" >> "{{ ccconf }}"
+ shell: printf "[nc]\ntxn.log.dir={{ home_dir }}/txnlog\n" >> "{{ ccconf }}"
- name: Populate NC iodevices to the cluster configuration file {{ ccconf }}
shell: printf "iodevices={{ home_dir }}/iodevice\n" >> "{{ ccconf }}"
diff --git a/asterixdb/asterix-server/src/main/opt/local/conf/cc.conf b/asterixdb/asterix-server/src/main/opt/local/conf/cc.conf
index 71b7514..d1f03bc 100644
--- a/asterixdb/asterix-server/src/main/opt/local/conf/cc.conf
+++ b/asterixdb/asterix-server/src/main/opt/local/conf/cc.conf
@@ -16,23 +16,23 @@
; under the License.
[nc/red]
-txnlogdir=data/red/txnlog
-coredumpdir=data/red/coredump
+txn.log.dir=data/red/txnlog
+core.dump.dir=data/red/coredump
iodevices=data/red
[nc/blue]
-port=9091
-txnlogdir=data/blue/txnlog
-coredumpdir=data/blue/coredump
+ncservice.port=9091
+txn.log.dir=data/blue/txnlog
+core.dump.dir=data/blue/coredump
iodevices=data/blue
[nc]
-storagedir=storage
+storage.subdir=storage
address=127.0.0.1
command=${NC_COMMAND}
[cc]
-cluster.address = 127.0.0.1
+address = 127.0.0.1
-[app]
+[common]
log.level=INFO
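The renamed keys above (txn.log.dir, core.dump.dir, storage.subdir, ncservice.port, and the bare address under [cc]) are derived mechanically from option names; the hunks further down add the IApplicationConfig / IOption API that performs that mapping. A minimal sketch of resolving one of the renamed ini keys back to its typed option follows; it is illustrative only, and appConfig stands for an already-initialized IApplicationConfig obtained from a running controller.

import org.apache.hyracks.api.config.IApplicationConfig;
import org.apache.hyracks.api.config.IOption;

public class IniKeyLookupSketch {
    // Resolves the renamed key "[nc] txn.log.dir" to its IOption and prints its value.
    public static void printTxnLogDir(IApplicationConfig appConfig) {
        IOption option = appConfig.lookupOption("nc", "txn.log.dir");
        if (option != null) {
            System.out.println(option.toIniString() + " = " + appConfig.getString(option));
        }
    }
}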
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/SampleLocalClusterIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/SampleLocalClusterIT.java
index 7324a5c..566ebb8 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/SampleLocalClusterIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/SampleLocalClusterIT.java
@@ -18,8 +18,6 @@
*/
package org.apache.asterix.server.test;
-import static org.apache.asterix.test.common.TestHelper.joinPath;
-
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
@@ -35,6 +33,7 @@
import org.apache.asterix.testframework.context.TestCaseContext.OutputFormat;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
+import org.apache.hyracks.util.file.FileUtil;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
@@ -49,13 +48,13 @@
// Important paths and files for this test.
// The "target" subdirectory of asterix-server. All outputs go here.
- private static final String TARGET_DIR = joinPath(/*System.getProperty("basedir"),*/ "target");
+ private static final String TARGET_DIR = FileUtil.joinPath("target");
// Directory where the NCs create and store all data, as configured by
// src/test/resources/NCServiceExecutionIT/cc.conf.
- private static final String OUTPUT_DIR = joinPath(TARGET_DIR, "sample-local-cluster");
+ private static final String OUTPUT_DIR = FileUtil.joinPath(TARGET_DIR, "sample-local-cluster");
- private static final String LOCAL_SAMPLES_DIR = joinPath(OUTPUT_DIR, "opt", "local");
+ private static final String LOCAL_SAMPLES_DIR = FileUtil.joinPath(OUTPUT_DIR, "opt", "local");
@Rule
public TestRule watcher = new TestMethodTracer();
@@ -71,18 +70,18 @@
}
outDir.mkdirs();
- String installerZip = joinPath(TARGET_DIR,
- new File(TARGET_DIR).list((dir, name) -> name.matches("asterix-server.*-binary-assembly.zip"))[0]);
+ String[] pathElements = new String[]{TARGET_DIR, new File(TARGET_DIR).list((dir, name) -> name.matches("asterix-server.*-binary-assembly.zip"))[0]};
+ String installerZip = FileUtil.joinPath(pathElements);
TestHelper.unzip(installerZip, OUTPUT_DIR);
}
@Test
public void test0_startCluster() throws Exception {
- Process process = new ProcessBuilder(joinPath(LOCAL_SAMPLES_DIR, "bin/stop-sample-cluster.sh"), "-f")
+ Process process = new ProcessBuilder(FileUtil.joinPath(LOCAL_SAMPLES_DIR, "bin/stop-sample-cluster.sh"), "-f")
.inheritIO().start();
Assert.assertEquals(0, process.waitFor());
- process = new ProcessBuilder(joinPath(LOCAL_SAMPLES_DIR, "bin/start-sample-cluster.sh")).inheritIO().start();
+ process = new ProcessBuilder(FileUtil.joinPath(LOCAL_SAMPLES_DIR, "bin/start-sample-cluster.sh")).inheritIO().start();
Assert.assertEquals(0, process.waitFor());
}
@@ -100,7 +99,7 @@
@Test
public void test2_stopCluster() throws Exception {
Process process =
- new ProcessBuilder(joinPath(LOCAL_SAMPLES_DIR, "bin/stop-sample-cluster.sh")).inheritIO().start();
+ new ProcessBuilder(FileUtil.joinPath(LOCAL_SAMPLES_DIR, "bin/stop-sample-cluster.sh")).inheritIO().start();
Assert.assertEquals(0, process.waitFor());
try {
new URL("http://127.0.0.1:19002").openConnection().connect();
diff --git a/asterixdb/asterix-server/src/test/resources/NCServiceExecutionIT/cc.conf b/asterixdb/asterix-server/src/test/resources/NCServiceExecutionIT/cc.conf
index 3f58362..cc87be4 100644
--- a/asterixdb/asterix-server/src/test/resources/NCServiceExecutionIT/cc.conf
+++ b/asterixdb/asterix-server/src/test/resources/NCServiceExecutionIT/cc.conf
@@ -16,27 +16,26 @@
; under the License.
[nc/asterix_nc1]
-txnlogdir=../asterix-server/target/tmp/asterix_nc1/txnlog
-coredumpdir=../asterix-server/target/tmp/asterix_nc1/coredump
+txn.log.dir=../asterix-server/target/tmp/asterix_nc1/txnlog
+core.dump.dir=../asterix-server/target/tmp/asterix_nc1/coredump
iodevices=../asterix-server/target/tmp/asterix_nc1/iodevice1,../asterix-server/target/tmp/asterix_nc1/iodevice2
+#jvm.args=-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5006
[nc/asterix_nc2]
-port=9091
-txnlogdir=../asterix-server/target/tmp/asterix_nc2/txnlog
-coredumpdir=../asterix-server/target/tmp/asterix_nc2/coredump
+ncservice.port=9091
+txn.log.dir=../asterix-server/target/tmp/asterix_nc2/txnlog
+core.dump.dir=../asterix-server/target/tmp/asterix_nc2/coredump
iodevices=../asterix-server/target/tmp/asterix_nc2/iodevice1,../asterix-server/target/tmp/asterix_nc2/iodevice2
+#jvm.args=-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5007
[nc]
address=127.0.0.1
command=asterixnc
app.class=org.apache.asterix.hyracks.bootstrap.NCApplicationEntryPoint
jvm.args=-Xmx4096m -Dnode.Resolver="org.apache.asterix.external.util.IdentitiyResolverFactory"
-storagedir=test_storage
-
-[cc]
-cluster.address = 127.0.0.1
-app.class=org.apache.asterix.hyracks.bootstrap.CCApplicationEntryPoint
-
-[app]
+storage.subdir=test_storage
storage.memorycomponent.globalbudget = 1073741824
+[cc]
+address = 127.0.0.1
+app.class=org.apache.asterix.hyracks.bootstrap.CCApplicationEntryPoint
diff --git a/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixApplicationMaster.java b/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixApplicationMaster.java
index 56d3d07..b4aeeb1 100644
--- a/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixApplicationMaster.java
+++ b/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixApplicationMaster.java
@@ -1032,36 +1032,32 @@
//get our java opts
vargs.add(ccJavaOpts);
vargs.add(CC_CLASSNAME);
- vargs.add("-app-cc-main-class org.apache.asterix.hyracks.bootstrap.CCApplicationEntryPoint");
- vargs.add("-cluster-net-ip-address " + cC.getClusterIp());
- vargs.add("-client-net-ip-address " + cC.getClientIp());
+ vargs.add("-app-class org.apache.asterix.hyracks.bootstrap.CCApplicationEntryPoint");
+ vargs.add("-address " + cC.getClusterIp());
+ vargs.add("-client-listen-address " + cC.getClientIp());
//pass CC optional parameters
if (clusterDesc.getHeartbeatPeriod() != null) {
vargs.add("-heartbeat-period " + String.valueOf(clusterDesc.getHeartbeatPeriod().intValue()));
}
if (clusterDesc.getMaxHeartbeatLapsePeriods() != null) {
- vargs.add("-max-heartbeat-lapse-periods "
+ vargs.add("-heartbeat-max-misses "
+ String.valueOf(clusterDesc.getMaxHeartbeatLapsePeriods().intValue()));
}
if (clusterDesc.getProfileDumpPeriod() != null) {
vargs.add("-profile-dump-period " + String.valueOf(clusterDesc.getProfileDumpPeriod().intValue()));
}
- if (clusterDesc.getDefaultMaxJobAttempts() != null) {
- vargs.add("-default-max-job-attempts "
- + String.valueOf(clusterDesc.getDefaultMaxJobAttempts().intValue()));
- }
if (clusterDesc.getJobHistorySize() != null) {
vargs.add("-job-history-size " + String.valueOf(clusterDesc.getJobHistorySize().intValue()));
}
if (clusterDesc.getResultTimeToLive() != null) {
- vargs.add("-result-time-to-live " + String.valueOf(clusterDesc.getResultTimeToLive().intValue()));
+ vargs.add("-result-ttl " + String.valueOf(clusterDesc.getResultTimeToLive().intValue()));
}
if (clusterDesc.getResultSweepThreshold() != null) {
vargs.add("-result-sweep-threshold "
+ String.valueOf(clusterDesc.getResultSweepThreshold().intValue()));
}
if (clusterDesc.getCcRoot() != null) {
- vargs.add("-cc-root " + clusterDesc.getCcRoot());
+ vargs.add("-root-dir " + clusterDesc.getCcRoot());
}
ccStarted.set(true);
@@ -1078,14 +1074,13 @@
}
vargs.add(ncJavaOpts);
vargs.add(NC_CLASSNAME);
- vargs.add("-app-nc-main-class org.apache.asterix.hyracks.bootstrap.NCApplicationEntryPoint");
+ vargs.add("-app-class org.apache.asterix.hyracks.bootstrap.NCApplicationEntryPoint");
vargs.add("-node-id " + local.getId());
- vargs.add("-cc-host " + cC.getClusterIp());
+ vargs.add("-cluster-address " + cC.getClusterIp());
vargs.add("-iodevices " + iodevice);
- vargs.add("-cluster-net-ip-address " + local.getClusterIp());
- vargs.add("-data-ip-address " + local.getClusterIp());
- vargs.add("-result-ip-address " + local.getClusterIp());
- vargs.add("--");
+ vargs.add("-address " + local.getClusterIp());
+ vargs.add("-data-listen-address " + local.getClusterIp());
+ vargs.add("-result-listen-address " + local.getClusterIp());
if (initial) {
vargs.add("-initial-run ");
}
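For readability, the controller flag renames applied in this hunk are collected below; every pair appears in the diff above, and -default-max-job-attempts is removed outright rather than renamed. The class is purely illustrative.

import java.util.LinkedHashMap;
import java.util.Map;

public class ControllerArgRenames {
    // Old controller flag (key) and its replacement (value), as changed in the hunk above.
    static final Map<String, String> RENAMES = new LinkedHashMap<>();
    static {
        RENAMES.put("-app-cc-main-class", "-app-class");
        RENAMES.put("-app-nc-main-class", "-app-class");
        RENAMES.put("-cluster-net-ip-address", "-address");
        RENAMES.put("-client-net-ip-address", "-client-listen-address");
        RENAMES.put("-max-heartbeat-lapse-periods", "-heartbeat-max-misses");
        RENAMES.put("-result-time-to-live", "-result-ttl");
        RENAMES.put("-cc-root", "-root-dir");
        RENAMES.put("-cc-host", "-cluster-address");
        RENAMES.put("-data-ip-address", "-data-listen-address");
        RENAMES.put("-result-ip-address", "-result-listen-address");
    }
}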
diff --git a/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixYARNClient.java b/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixYARNClient.java
index f9d10af..0ccaf07 100644
--- a/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixYARNClient.java
+++ b/asterixdb/asterix-yarn/src/main/java/org/apache/asterix/aoya/AsterixYARNClient.java
@@ -1362,12 +1362,12 @@
}
configuration.setStore(stores);
List<Coredump> coredump = new ArrayList<Coredump>();
- String coredumpDir = null;
+ String coredumpdir = null;
List<TransactionLogDir> txnLogDirs = new ArrayList<TransactionLogDir>();
String txnLogDir = null;
for (Node node : cluster.getNode()) {
- coredumpDir = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
- coredump.add(new Coredump(node.getId(), coredumpDir + "coredump" + File.separator));
+ coredumpdir = node.getLogDir() == null ? cluster.getLogDir() : node.getLogDir();
+ coredump.add(new Coredump(node.getId(), coredumpdir + "coredump" + File.separator));
txnLogDir = node.getTxnLogDir() == null ? cluster.getTxnLogDir() : node.getTxnLogDir(); //node or cluster-wide
txnLogDirs.add(new TransactionLogDir(node.getId(), txnLogDir
+ (txnLogDir.charAt(txnLogDir.length() - 1) == File.separatorChar ? File.separator : "")
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
index 1d509cf..fe6472e 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
+++ b/hyracks-fullstack/algebricks/algebricks-tests/pom.xml
@@ -71,7 +71,7 @@
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
- <excludes>
+ <excludes combine.children="append">
<exclude>data/**</exclude>
<exclude>src/test/resources/results/scanMicroSortWrite.out</exclude>
</excludes>
@@ -169,6 +169,11 @@
<version>${project.version}</version>
</dependency>
<dependency>
+ <groupId>org.apache.hyracks</groupId>
+ <artifactId>hyracks-util</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
index 9880047..2971b72 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
@@ -18,6 +18,8 @@
*/
package org.apache.hyracks.algebricks.tests.util;
+import static org.apache.hyracks.util.file.FileUtil.joinPath;
+
import java.io.File;
import java.util.EnumSet;
@@ -47,43 +49,41 @@
private static IHyracksClientConnection hcc;
public static void init() throws Exception {
- FileUtils.deleteQuietly(new File("target" + File.separator + "data"));
- FileUtils.copyDirectory(new File("data"), new File("target" + File.separator + "data"));
+ FileUtils.deleteQuietly(new File(joinPath("target", "data")));
+ FileUtils.copyDirectory(new File("data"), new File(joinPath("target", "data")));
CCConfig ccConfig = new CCConfig();
- ccConfig.clientNetIpAddress = "127.0.0.1";
- ccConfig.clientNetPort = TEST_HYRACKS_CC_CLIENT_NET_PORT;
- ccConfig.clusterNetIpAddress = "127.0.0.1";
- ccConfig.clusterNetPort = TEST_HYRACKS_CC_CLUSTER_NET_PORT;
+ ccConfig.setClientListenAddress("127.0.0.1");
+ ccConfig.setClientListenPort(TEST_HYRACKS_CC_CLIENT_NET_PORT);
+ ccConfig.setClusterListenAddress("127.0.0.1");
+ ccConfig.setClusterListenPort(TEST_HYRACKS_CC_CLUSTER_NET_PORT);
cc = new ClusterControllerService(ccConfig);
cc.start();
- NCConfig ncConfig1 = new NCConfig();
- ncConfig1.ccHost = "localhost";
- ncConfig1.ccPort = TEST_HYRACKS_CC_CLUSTER_NET_PORT;
- ncConfig1.clusterNetIPAddress = "127.0.0.1";
- ncConfig1.dataIPAddress = "127.0.0.1";
- ncConfig1.resultIPAddress = "127.0.0.1";
- ncConfig1.nodeId = NC1_ID;
- ncConfig1.ioDevices = System.getProperty("user.dir") + File.separator + "target" + File.separator + "data"
- + File.separator + "device0";
- FileUtils.forceMkdir(new File(ncConfig1.ioDevices));
+ NCConfig ncConfig1 = new NCConfig(NC1_ID);
+ ncConfig1.setClusterAddress("localhost");
+ ncConfig1.setClusterPort(TEST_HYRACKS_CC_CLUSTER_NET_PORT);
+ ncConfig1.setClusterListenAddress("127.0.0.1");
+ ncConfig1.setDataListenAddress("127.0.0.1");
+ ncConfig1.setResultListenAddress("127.0.0.1");
+ ncConfig1.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data",
+ "device0") });
+ FileUtils.forceMkdir(new File(ncConfig1.getIODevices()[0]));
nc1 = new NodeControllerService(ncConfig1);
nc1.start();
- NCConfig ncConfig2 = new NCConfig();
- ncConfig2.ccHost = "localhost";
- ncConfig2.ccPort = TEST_HYRACKS_CC_CLUSTER_NET_PORT;
- ncConfig2.clusterNetIPAddress = "127.0.0.1";
- ncConfig2.dataIPAddress = "127.0.0.1";
- ncConfig2.resultIPAddress = "127.0.0.1";
- ncConfig2.nodeId = NC2_ID;
- ncConfig1.ioDevices = System.getProperty("user.dir") + File.separator + "target" + File.separator + "data"
- + File.separator + "device1";
- FileUtils.forceMkdir(new File(ncConfig1.ioDevices));
+ NCConfig ncConfig2 = new NCConfig(NC2_ID);
+ ncConfig2.setClusterAddress("localhost");
+ ncConfig2.setClusterPort(TEST_HYRACKS_CC_CLUSTER_NET_PORT);
+ ncConfig2.setClusterListenAddress("127.0.0.1");
+ ncConfig2.setDataListenAddress("127.0.0.1");
+ ncConfig2.setResultListenAddress("127.0.0.1");
+ ncConfig2.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data",
+ "device1") });
+ FileUtils.forceMkdir(new File(ncConfig1.getIODevices()[0]));
nc2 = new NodeControllerService(ncConfig2);
nc2.start();
- hcc = new HyracksConnection(ccConfig.clientNetIpAddress, ccConfig.clientNetPort);
+ hcc = new HyracksConnection(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort());
}
public static void deinit() throws Exception {
diff --git a/hyracks-fullstack/hyracks/hyracks-api/pom.xml b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
index cde607f..ec60022 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-api/pom.xml
@@ -94,5 +94,9 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>args4j</groupId>
+ <artifactId>args4j</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IApplicationConfig.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IApplicationConfig.java
deleted file mode 100644
index 278a9d7..0000000
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IApplicationConfig.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hyracks.api.application;
-
-import java.util.List;
-import java.util.Map.Entry;
-import java.util.Set;
-
-/**
- * Accessor for the data contained in the global application configuration file.
- */
-public interface IApplicationConfig {
- String getString(String section, String key);
-
- String getString(String section, String key, String defaultValue);
-
- int getInt(String section, String key);
-
- int getInt(String section, String key, int defaultValue);
-
- long getLong(String section, String key);
-
- long getLong(String section, String key, long defaultValue);
-
- Set<String> getSections();
-
- Set<String> getKeys(String section);
-
- String[] getStringArray(String section, String key);
-
- List<Set<Entry<String, String>>> getMultiSections(String section);
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IApplicationContext.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IApplicationContext.java
index 7b07174..c933d9d 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IApplicationContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IApplicationContext.java
@@ -21,6 +21,7 @@
import java.io.Serializable;
import java.util.concurrent.ThreadFactory;
+import org.apache.hyracks.api.config.IApplicationConfig;
import org.apache.hyracks.api.job.IJobSerializerDeserializerContainer;
import org.apache.hyracks.api.messages.IMessageBroker;
import org.apache.hyracks.api.service.IControllerService;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/ICCApplicationEntryPoint.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/ICCApplicationEntryPoint.java
index c11cc7a..4f6f450 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/ICCApplicationEntryPoint.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/ICCApplicationEntryPoint.java
@@ -18,7 +18,9 @@
*/
package org.apache.hyracks.api.application;
+import org.apache.hyracks.api.config.IConfigManager;
import org.apache.hyracks.api.job.resource.IJobCapacityController;
+import org.kohsuke.args4j.OptionHandlerFilter;
public interface ICCApplicationEntryPoint {
void start(ICCApplicationContext ccAppCtx, String[] args) throws Exception;
@@ -28,4 +30,10 @@
void startupCompleted() throws Exception;
IJobCapacityController getJobCapacityController();
+
+ void registerConfig(IConfigManager configManager);
+
+ default OptionHandlerFilter getUsageFilter() {
+ return OptionHandlerFilter.PUBLIC;
+ }
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IClusterLifecycleListener.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IClusterLifecycleListener.java
index 191a4af..2c53e1c 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IClusterLifecycleListener.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/IClusterLifecycleListener.java
@@ -20,8 +20,8 @@
import java.util.Collection;
import java.util.Map;
-import java.util.Set;
+import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.exceptions.HyracksException;
/**
@@ -40,9 +40,8 @@
* @param nodeId
* A unique identifier of a Node Controller
* @param ncConfiguration
- * A map containing the set of configuration parameters that were used to start the Node Controller
*/
- public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) throws HyracksException;
+ public void notifyNodeJoin(String nodeId, Map<IOption, Object> ncConfiguration) throws HyracksException;
/**
* @param deadNodeIds
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/INCApplicationEntryPoint.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/INCApplicationEntryPoint.java
index dea6e4b..a92cd4a 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/INCApplicationEntryPoint.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/INCApplicationEntryPoint.java
@@ -18,7 +18,9 @@
*/
package org.apache.hyracks.api.application;
+import org.apache.hyracks.api.config.IConfigManager;
import org.apache.hyracks.api.job.resource.NodeCapacity;
+import org.kohsuke.args4j.OptionHandlerFilter;
public interface INCApplicationEntryPoint {
void start(INCApplicationContext ncAppCtx, String[] args) throws Exception;
@@ -28,4 +30,10 @@
void stop() throws Exception;
NodeCapacity getCapacity();
+
+ void registerConfigOptions(IConfigManager configManager);
+
+ default OptionHandlerFilter getUsageFilter() {
+ return OptionHandlerFilter.PUBLIC;
+ }
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
new file mode 100644
index 0000000..0335b80
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.api.config;
+
+import java.util.List;
+import java.util.Set;
+import java.util.function.Predicate;
+import java.util.logging.Level;
+
+/**
+ * Accessor for the data contained in the global application configuration file.
+ */
+public interface IApplicationConfig {
+ String getString(String section, String key);
+
+ int getInt(String section, String key);
+
+ long getLong(String section, String key);
+
+ Set<String> getSectionNames();
+
+ Set<String> getKeys(String section);
+
+ Object getStatic(IOption option);
+
+ List<String> getNCNames();
+
+ IOption lookupOption(String sectionName, String propertyName);
+
+ Set<IOption> getOptions();
+
+ Set<IOption> getOptions(Section section);
+
+ IApplicationConfig getNCEffectiveConfig(String nodeId);
+
+ Set<Section> getSections();
+
+ Set<Section> getSections(Predicate<Section> predicate);
+
+ default Object get(IOption option) {
+ return option.get(this);
+ }
+
+ default long getLong(IOption option) {
+ return (long)get(option);
+ }
+
+ default int getInt(IOption option) {
+ return (int)get(option);
+ }
+
+ default String getString(IOption option) {
+ return (String)get(option);
+ }
+
+ default boolean getBoolean(IOption option) {
+ return (boolean)get(option);
+ }
+
+ default Level getLoggingLevel(IOption option) {
+ return (Level)get(option);
+ }
+
+ default double getDouble(IOption option) {
+ return (double)get(option);
+ }
+
+ default String [] getStringArray(IOption option) {
+ return (String [])get(option);
+ }
+}
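The default methods above provide typed access keyed by IOption rather than by raw section/key strings. A small sketch of a consumer reading values through the interface follows; the NCConfig.Option constants are the same ones used by the ClusterControllerService changes later in this diff, and the method is illustrative only.

import org.apache.hyracks.api.config.IApplicationConfig;
import org.apache.hyracks.control.common.controllers.NCConfig;

public class TypedConfigReadSketch {
    // Reads a node's NC service endpoint through the typed accessors, mirroring
    // the getNCServices() logic added to ClusterControllerService below.
    public static String describeNcService(IApplicationConfig ncConfig) {
        if (ncConfig.getBoolean(NCConfig.Option.VIRTUAL_NC)) {
            return "virtual NC (no ncservice)";
        }
        return ncConfig.getString(NCConfig.Option.NCSERVICE_ADDRESS) + ":"
                + ncConfig.getInt(NCConfig.Option.NCSERVICE_PORT);
    }
}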
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IConfigManager.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IConfigManager.java
new file mode 100644
index 0000000..fb1332b
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IConfigManager.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.api.config;
+
+import java.util.Set;
+import java.util.function.Predicate;
+
+import org.kohsuke.args4j.OptionHandlerFilter;
+
+public interface IConfigManager {
+ int PARSE_INI_POINTERS_METRIC = 100;
+ int PARSE_INI_METRIC = 200;
+ int PARSE_COMMAND_LINE_METRIC = 300;
+ int APPLY_DEFAULTS_METRIC = 400;
+
+ void register(IOption... options);
+
+ @SuppressWarnings("unchecked")
+ void register(Class<? extends IOption>... optionClasses);
+
+ Set<Section> getSections(Predicate<Section> predicate);
+
+ Set<Section> getSections();
+
+ Set<IOption> getOptions(Section section);
+
+ IApplicationConfig getAppConfig();
+
+ void addConfigurator(int metric, IConfigurator configurator);
+
+ void addIniParamOptions(IOption... options);
+
+ void addCmdLineSections(Section... sections);
+
+ void setUsageFilter(OptionHandlerFilter usageFilter);
+}
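Entry points call into this interface from registerConfig() to declare which option classes, ini-only parameters, and command-line sections apply to a given process (see CCApplicationEntryPoint further down). A hedged sketch of such a registration, including a post-ini hook ordered via the *_METRIC constants; the extra validation step is an assumption, not something this change requires.

import org.apache.hyracks.api.config.IConfigManager;
import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.control.common.controllers.NCConfig;

public class ConfigRegistrationSketch {
    // Declares the option classes and command-line sections a process cares about,
    // then hooks a custom step to run after the ini file has been parsed.
    public static void register(IConfigManager configManager) {
        configManager.register(NCConfig.Option.class);
        configManager.addCmdLineSections(Section.NC, Section.COMMON);
        configManager.addConfigurator(IConfigManager.PARSE_INI_METRIC + 1, () -> {
            // application-specific validation of parsed ini values would go here
        });
    }
}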
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/replication/replication.1.get.http b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IConfigurator.java
similarity index 79%
rename from asterixdb/asterix-app/src/test/resources/runtimets/queries/api/replication/replication.1.get.http
rename to hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IConfigurator.java
index 5976b5d..54f780d 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/replication/replication.1.get.http
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IConfigurator.java
@@ -16,10 +16,12 @@
* specific language governing permissions and limitations
* under the License.
*/
-/*
- * Test case Name : replication
- * Description : Replication
- * Expected Result : Positive
- * Date : 28th October 2016
- */
-/admin/cluster/replication
+package org.apache.hyracks.api.config;
+
+import java.io.IOException;
+
+import org.kohsuke.args4j.CmdLineException;
+
+public interface IConfigurator {
+ void run() throws IOException, CmdLineException;
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOption.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOption.java
new file mode 100644
index 0000000..834d73c
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOption.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.api.config;
+
+import java.util.function.Function;
+
+public interface IOption {
+
+ String name();
+
+ Section section();
+
+ String description();
+
+ IOptionType type();
+
+ /**
+ * @return the unresolved default value of this option
+ */
+ Object defaultValue();
+
+ /**
+ * @return a string to describe the default value, or null if the default should be used
+ */
+ default String usageDefaultOverride(IApplicationConfig accessor, Function<IOption, String> optionPrinter) {
+ return null;
+ }
+
+ /**
+ * Implementations should override this default implementation if this property value is non-static and should be
+ * calculated on every call
+ * @return the current value of this property
+ */
+ default Object get(IApplicationConfig appConfig) {
+ return appConfig.getStatic(this);
+ }
+
+ /**
+ * @return true if this option should not be advertised (e.g. command-line usage, documentation)
+ */
+ default boolean hidden() { return false; }
+
+ default String cmdline() {
+ return "-" + name().toLowerCase().replace("_", "-");
+ }
+
+ default String ini() {
+ return name().toLowerCase().replace("_", ".");
+ }
+
+ default String toIniString() {
+ return "[" + section().sectionName() + "] " + ini();
+ }
+}
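The cmdline() and ini() defaults above are what normalize the parameter names: a single enum constant name yields both the command-line flag and the ini key. A minimal sketch of a concrete option enum follows; SketchOption is hypothetical and exists only to show the derivation (the real options live in CCConfig, NCConfig and friends).

import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.config.IOptionType;
import org.apache.hyracks.api.config.Section;

public enum SketchOption implements IOption {
    TXN_LOG_DIR; // yields cmdline "-txn-log-dir" and ini key "txn.log.dir" via the defaults above

    @Override
    public Section section() {
        return Section.NC;
    }

    @Override
    public String description() {
        return "Directory for transaction logs (illustrative only)";
    }

    @Override
    public IOptionType type() {
        return null; // a real option supplies a string-valued IOptionType here; omitted in this sketch
    }

    @Override
    public Object defaultValue() {
        return "txnlog";
    }
}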
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/replication/replication.1.get.http b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOptionType.java
similarity index 61%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/api/replication/replication.1.get.http
copy to hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOptionType.java
index 5976b5d..2a98fdc 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/replication/replication.1.get.http
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOptionType.java
@@ -16,10 +16,25 @@
* specific language governing permissions and limitations
* under the License.
*/
-/*
- * Test case Name : replication
- * Description : Replication
- * Expected Result : Positive
- * Date : 28th October 2016
- */
-/admin/cluster/replication
+package org.apache.hyracks.api.config;
+
+public interface IOptionType<T> {
+ /**
+ * @throws IllegalArgumentException when the supplied string cannot be interpreted
+ */
+ T parse(String s);
+
+ Class<T> targetType();
+
+ default Object serializeToJSON(Object value) {
+ return value;
+ }
+
+ default String serializeToIni(Object value) {
+ return String.valueOf(value);
+ }
+
+ default String serializeToString(Object value) {
+ return serializeToIni(value);
+ }
+}
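A minimal sketch of an IOptionType implementation for integer-valued options. The built-in type implementations added elsewhere in this change are not shown in this diff, so the class below is illustrative only.

import org.apache.hyracks.api.config.IOptionType;

public class IntegerOptionType implements IOptionType<Integer> {
    // parse() reports bad input via IllegalArgumentException per the contract above
    // (NumberFormatException is a subclass of IllegalArgumentException).
    @Override
    public Integer parse(String s) {
        return Integer.parseInt(s);
    }

    @Override
    public Class<Integer> targetType() {
        return Integer.class;
    }
}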
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/Section.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/Section.java
new file mode 100644
index 0000000..8b19d80
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/Section.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.api.config;
+
+public enum Section {
+ CC,
+ NC,
+ COMMON,
+ LOCALNC,
+ EXTENSION,
+ VIRTUAL; // virtual section indicates options which are not accessible from the cmd-line or the ini file
+
+ public static Section parseSectionName(String name) {
+ for (Section section : values()) {
+ if (section.sectionName().equals(name)) {
+ return section;
+ }
+ }
+ return null;
+ }
+
+ public String sectionName() {
+ return name().toLowerCase();
+ }
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/ICCContext.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/ICCContext.java
index f9618cf..83e0482 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/ICCContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/ICCContext.java
@@ -27,9 +27,9 @@
import org.apache.hyracks.api.topology.ClusterTopology;
public interface ICCContext {
- public ClusterControllerInfo getClusterControllerInfo();
+ ClusterControllerInfo getClusterControllerInfo();
- public void getIPAddressNodeMap(Map<InetAddress, Set<String>> map) throws HyracksDataException;
+ void getIPAddressNodeMap(Map<InetAddress, Set<String>> map) throws HyracksDataException;
- public ClusterTopology getClusterTopology();
+ ClusterTopology getClusterTopology();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java
index 552fbeb..34c58f8 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java
@@ -22,7 +22,6 @@
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
-import java.util.StringTokenizer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -79,11 +78,10 @@
* comma separated list of devices
* @return
*/
- public static List<IODeviceHandle> getDevices(String ioDevices) {
+ public static List<IODeviceHandle> getDevices(String [] ioDevices) {
List<IODeviceHandle> devices = new ArrayList<>();
- StringTokenizer tok = new StringTokenizer(ioDevices, ",");
- while (tok.hasMoreElements()) {
- String devPath = tok.nextToken().trim();
+ for (String ioDevice : ioDevices) {
+ String devPath = ioDevice.trim();
devices.add(new IODeviceHandle(new File(devPath), "."));
}
return devices;
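getDevices() now accepts the already-split String[] produced by the typed iodevices option instead of re-tokenizing a comma-separated string. A small usage sketch tying the two together; ncConfig stands for an already-populated NCConfig.

import java.util.List;

import org.apache.hyracks.api.io.IODeviceHandle;
import org.apache.hyracks.control.common.controllers.NCConfig;

public class IoDevicesSketch {
    // Builds device handles straight from the typed iodevices option.
    public static List<IODeviceHandle> devicesFor(NCConfig ncConfig) {
        return IODeviceHandle.getDevices(ncConfig.getIODevices());
    }
}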
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/HyracksUtils.java b/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/HyracksUtils.java
index a41ddd9..54ae838 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/HyracksUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/HyracksUtils.java
@@ -50,37 +50,34 @@
public static void init() throws Exception {
CCConfig ccConfig = new CCConfig();
- ccConfig.clientNetIpAddress = CC_HOST;
- ccConfig.clusterNetIpAddress = CC_HOST;
- ccConfig.clusterNetPort = TEST_HYRACKS_CC_PORT;
- ccConfig.clientNetPort = TEST_HYRACKS_CC_CLIENT_PORT;
- ccConfig.defaultMaxJobAttempts = 0;
- ccConfig.jobHistorySize = 0;
- ccConfig.profileDumpPeriod = -1;
- ccConfig.heartbeatPeriod = 50;
+ ccConfig.setClientListenAddress(CC_HOST);
+ ccConfig.setClusterListenAddress(CC_HOST);
+ ccConfig.setClusterListenPort(TEST_HYRACKS_CC_PORT);
+ ccConfig.setClientListenPort(TEST_HYRACKS_CC_CLIENT_PORT);
+ ccConfig.setJobHistorySize(0);
+ ccConfig.setProfileDumpPeriod(-1);
+ ccConfig.setHeartbeatPeriod(50);
// cluster controller
cc = new ClusterControllerService(ccConfig);
cc.start();
// two node controllers
- NCConfig ncConfig1 = new NCConfig();
- ncConfig1.ccHost = "localhost";
- ncConfig1.clusterNetIPAddress = "localhost";
- ncConfig1.ccPort = TEST_HYRACKS_CC_PORT;
- ncConfig1.dataIPAddress = "127.0.0.1";
- ncConfig1.resultIPAddress = "127.0.0.1";
- ncConfig1.nodeId = NC1_ID;
+ NCConfig ncConfig1 = new NCConfig(NC1_ID);
+ ncConfig1.setClusterAddress("localhost");
+ ncConfig1.setClusterListenAddress("localhost");
+ ncConfig1.setClusterPort(TEST_HYRACKS_CC_PORT);
+ ncConfig1.setDataListenAddress("127.0.0.1");
+ ncConfig1.setResultListenAddress("127.0.0.1");
nc1 = new NodeControllerService(ncConfig1);
nc1.start();
- NCConfig ncConfig2 = new NCConfig();
- ncConfig2.ccHost = "localhost";
- ncConfig2.clusterNetIPAddress = "localhost";
- ncConfig2.ccPort = TEST_HYRACKS_CC_PORT;
- ncConfig2.dataIPAddress = "127.0.0.1";
- ncConfig2.resultIPAddress = "127.0.0.1";
- ncConfig2.nodeId = NC2_ID;
+ NCConfig ncConfig2 = new NCConfig(NC2_ID);
+ ncConfig2.setClusterAddress("localhost");
+ ncConfig2.setClusterListenAddress("localhost");
+ ncConfig2.setClusterPort(TEST_HYRACKS_CC_PORT);
+ ncConfig2.setDataListenAddress("127.0.0.1");
+ ncConfig2.setResultListenAddress("127.0.0.1");
nc2 = new NodeControllerService(ncConfig2);
nc2.start();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
index 9dc3c1a..ba086ad 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/pom.xml
@@ -63,7 +63,7 @@
<licenseFamilies combine.children="append">
<licenseFamily implementation="org.apache.rat.license.MITLicenseFamily"/>
</licenseFamilies>
- <excludes>
+ <excludes combine.children="append">
<!-- See hyracks-fullstack-license/src/main/licenses/templates/source_licenses.ftl -->
<exclude>src/main/resources/static/javascript/flot/jquery.flot.resize.min.js</exclude>
<exclude>src/main/resources/static/javascript/jsplumb/jquery.jsPlumb-1.3.5-all-min.js</exclude>
@@ -104,7 +104,6 @@
<dependency>
<groupId>args4j</groupId>
<artifactId>args4j</artifactId>
- <version>2.0.12</version>
</dependency>
<dependency>
<groupId>org.apache.hyracks</groupId>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCApplicationEntryPoint.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCApplicationEntryPoint.java
new file mode 100644
index 0000000..07008df
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCApplicationEntryPoint.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.cc;
+
+import java.util.Arrays;
+
+import org.apache.hyracks.api.application.ICCApplicationContext;
+import org.apache.hyracks.api.application.ICCApplicationEntryPoint;
+import org.apache.hyracks.api.config.IConfigManager;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.api.job.resource.DefaultJobCapacityController;
+import org.apache.hyracks.api.job.resource.IJobCapacityController;
+import org.apache.hyracks.control.common.controllers.CCConfig;
+import org.apache.hyracks.control.common.controllers.ControllerConfig;
+import org.apache.hyracks.control.common.controllers.NCConfig;
+
+public class CCApplicationEntryPoint implements ICCApplicationEntryPoint {
+ public static final ICCApplicationEntryPoint INSTANCE = new CCApplicationEntryPoint();
+
+ protected CCApplicationEntryPoint() {
+ }
+
+ @Override
+ public void start(ICCApplicationContext ccAppCtx, String[] args) throws Exception {
+ if (args.length > 0) {
+ throw new IllegalArgumentException("Unrecognized argument(s): " + Arrays.toString(args));
+ }
+ }
+
+ @Override
+ public void stop() throws Exception {
+ // no-op
+ }
+
+ @Override
+ public void startupCompleted() throws Exception {
+ // no-op
+ }
+
+ @Override
+ public IJobCapacityController getJobCapacityController() {
+ return DefaultJobCapacityController.INSTANCE;
+ }
+
+ @Override
+ public void registerConfig(IConfigManager configManager) {
+ configManager.addIniParamOptions(ControllerConfig.Option.CONFIG_FILE, ControllerConfig.Option.CONFIG_FILE_URL);
+ configManager.addCmdLineSections(Section.CC, Section.COMMON);
+ configManager.setUsageFilter(getUsageFilter());
+ configManager.register(ControllerConfig.Option.class, CCConfig.Option.class, NCConfig.Option.class);
+ }
+}
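Applications select their own entry point via -app-class and typically extend this default to register additional option classes on top of the controller ones. A hedged sketch; MyAppCcEntryPoint and the commented-out MyAppConfig.Option are hypothetical and not part of this change.

import org.apache.hyracks.api.config.IConfigManager;
import org.apache.hyracks.control.cc.CCApplicationEntryPoint;

public class MyAppCcEntryPoint extends CCApplicationEntryPoint {
    @Override
    public void registerConfig(IConfigManager configManager) {
        super.registerConfig(configManager); // controller and common options, ini pointers, usage filter
        // configManager.register(MyAppConfig.Option.class); // hypothetical app-specific options
    }
}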
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
index dff3107..754deac 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/CCDriver.java
@@ -18,32 +18,50 @@
*/
package org.apache.hyracks.control.cc;
-import org.kohsuke.args4j.CmdLineParser;
+import static org.apache.hyracks.control.common.controllers.CCConfig.Option.APP_CLASS;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.hyracks.api.application.ICCApplicationEntryPoint;
+import org.apache.hyracks.control.common.config.ConfigManager;
+import org.apache.hyracks.control.common.config.ConfigUtils;
import org.apache.hyracks.control.common.controllers.CCConfig;
+import org.kohsuke.args4j.CmdLineException;
public class CCDriver {
- public static void main(String args []) throws Exception {
- try {
- CCConfig ccConfig = new CCConfig();
- CmdLineParser cp = new CmdLineParser(ccConfig);
- try {
- cp.parseArgument(args);
- } catch (Exception e) {
- System.err.println(e.getMessage());
- cp.printUsage(System.err);
- return;
- }
- ccConfig.loadConfigAndApplyDefaults();
+ private static final Logger LOGGER = Logger.getLogger(CCDriver.class.getName());
- ClusterControllerService ccService = new ClusterControllerService(ccConfig);
+ private CCDriver() {
+ }
+
+ public static void main(String[] args) throws Exception {
+ try {
+ final ConfigManager configManager = new ConfigManager(args);
+ ICCApplicationEntryPoint appEntryPoint = getAppEntryPoint(args);
+ appEntryPoint.registerConfig(configManager);
+ CCConfig ccConfig = new CCConfig(configManager);
+ ClusterControllerService ccService = new ClusterControllerService(ccConfig, appEntryPoint);
ccService.start();
while (true) {
Thread.sleep(100000);
}
+ } catch (CmdLineException e) {
+ LOGGER.log(Level.FINE, "Exception parsing command line: " + Arrays.toString(args), e);
+ System.exit(2);
} catch (Exception e) {
- e.printStackTrace();
+ LOGGER.log(Level.SEVERE, "Exiting CCDriver due to exception", e);
System.exit(1);
}
}
+
+ private static ICCApplicationEntryPoint getAppEntryPoint(String[] args)
+ throws ClassNotFoundException, InstantiationException, IllegalAccessException, IOException {
+ // determine app class so that we can use the correct implementation of the configuration...
+ String appClassName = ConfigUtils.getOptionValue(args, APP_CLASS);
+ return appClassName != null ? (ICCApplicationEntryPoint) (Class.forName(appClassName)).newInstance()
+ : CCApplicationEntryPoint.INSTANCE;
+ }
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
index c47284c..21b9dcf 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
@@ -20,6 +20,7 @@
import java.io.File;
import java.io.FileReader;
+import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.InetAddress;
@@ -32,20 +33,22 @@
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
+import java.util.TreeMap;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.application.ICCApplicationEntryPoint;
import org.apache.hyracks.api.client.ClusterControllerInfo;
import org.apache.hyracks.api.comm.NetworkAddress;
+import org.apache.hyracks.api.config.IApplicationConfig;
import org.apache.hyracks.api.context.ICCContext;
import org.apache.hyracks.api.deployment.DeploymentId;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.HyracksException;
-import org.apache.hyracks.api.job.resource.DefaultJobCapacityController;
import org.apache.hyracks.api.job.resource.IJobCapacityController;
import org.apache.hyracks.api.service.IControllerService;
import org.apache.hyracks.api.topology.ClusterTopology;
@@ -66,9 +69,10 @@
import org.apache.hyracks.control.cc.work.RemoveDeadNodesWork;
import org.apache.hyracks.control.cc.work.ShutdownNCServiceWork;
import org.apache.hyracks.control.cc.work.TriggerNCWork;
+import org.apache.hyracks.control.common.config.ConfigManager;
import org.apache.hyracks.control.common.context.ServerContext;
import org.apache.hyracks.control.common.controllers.CCConfig;
-import org.apache.hyracks.control.common.controllers.IniUtils;
+import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.hyracks.control.common.deployment.DeploymentRun;
import org.apache.hyracks.control.common.ipc.CCNCFunctions;
import org.apache.hyracks.control.common.logs.LogFile;
@@ -77,7 +81,6 @@
import org.apache.hyracks.ipc.api.IIPCI;
import org.apache.hyracks.ipc.impl.IPCSystem;
import org.apache.hyracks.ipc.impl.JavaSerializationBasedPayloadSerializerDeserializer;
-import org.ini4j.Ini;
import org.xml.sax.InputSource;
public class ClusterControllerService implements IControllerService {
@@ -85,6 +88,8 @@
private final CCConfig ccConfig;
+ private final ConfigManager configManager;
+
private IPCSystem clusterIPC;
private IPCSystem clientIPC;
@@ -127,11 +132,22 @@
private ShutdownRun shutdownCallback;
- private ICCApplicationEntryPoint aep;
+ private final ICCApplicationEntryPoint aep;
- public ClusterControllerService(final CCConfig ccConfig) throws Exception {
- this.ccConfig = ccConfig;
- File jobLogFolder = new File(ccConfig.ccRoot, "logs/jobs");
+ public ClusterControllerService(final CCConfig config) throws Exception {
+ this(config, getApplicationEntryPoint(config));
+ }
+
+ public ClusterControllerService(final CCConfig config,
+ final ICCApplicationEntryPoint aep) throws Exception {
+ this.ccConfig = config;
+ this.configManager = ccConfig.getConfigManager();
+ if (aep == null) {
+ throw new IllegalArgumentException("ICCApplicationEntryPoint cannot be null");
+ }
+ this.aep = aep;
+ configManager.processConfig();
+ File jobLogFolder = new File(ccConfig.getRootDir(), "logs/jobs");
jobLog = new LogFile(jobLogFolder);
// WorkQueue is in charge of heartbeat as well as other events.
@@ -140,7 +156,8 @@
final ClusterTopology topology = computeClusterTopology(ccConfig);
ccContext = new ClusterControllerContext(topology);
sweeper = new DeadNodeSweeper();
- datasetDirectoryService = new DatasetDirectoryService(ccConfig.resultTTL, ccConfig.resultSweepThreshold);
+ datasetDirectoryService = new DatasetDirectoryService(ccConfig.getResultTTL(),
+ ccConfig.getResultSweepThreshold());
deploymentRunMap = new HashMap<>();
stateDumpRunMap = new HashMap<>();
@@ -151,10 +168,10 @@
}
private static ClusterTopology computeClusterTopology(CCConfig ccConfig) throws Exception {
- if (ccConfig.clusterTopologyDefinition == null) {
+ if (ccConfig.getClusterTopology() == null) {
return null;
}
- FileReader fr = new FileReader(ccConfig.clusterTopologyDefinition);
+ FileReader fr = new FileReader(ccConfig.getClusterTopology());
InputSource in = new InputSource(fr);
try {
return TopologyDefinitionParser.parse(in);
@@ -166,20 +183,21 @@
@Override
public void start() throws Exception {
LOGGER.log(Level.INFO, "Starting ClusterControllerService: " + this);
- serverCtx = new ServerContext(ServerContext.ServerType.CLUSTER_CONTROLLER, new File(ccConfig.ccRoot));
+ serverCtx = new ServerContext(ServerContext.ServerType.CLUSTER_CONTROLLER, new File(ccConfig.getRootDir()));
IIPCI ccIPCI = new ClusterControllerIPCI(this);
- clusterIPC = new IPCSystem(new InetSocketAddress(ccConfig.clusterNetPort), ccIPCI,
+ clusterIPC = new IPCSystem(new InetSocketAddress(ccConfig.getClusterListenPort()), ccIPCI,
new CCNCFunctions.SerializerDeserializer());
IIPCI ciIPCI = new ClientInterfaceIPCI(this);
- clientIPC = new IPCSystem(new InetSocketAddress(ccConfig.clientNetIpAddress, ccConfig.clientNetPort), ciIPCI,
- new JavaSerializationBasedPayloadSerializerDeserializer());
- webServer = new WebServer(this, ccConfig.httpPort);
+ clientIPC = new IPCSystem(
+ new InetSocketAddress(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort()),
+ ciIPCI, new JavaSerializationBasedPayloadSerializerDeserializer());
+ webServer = new WebServer(this, ccConfig.getConsoleListenPort());
clusterIPC.start();
clientIPC.start();
webServer.start();
- info = new ClusterControllerInfo(ccConfig.clientNetIpAddress, ccConfig.clientNetPort,
+ info = new ClusterControllerInfo(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort(),
webServer.getListeningPort());
- timer.schedule(sweeper, 0, ccConfig.heartbeatPeriod);
+ timer.schedule(sweeper, 0, ccConfig.getHeartbeatPeriod());
jobLog.open();
startApplication();
@@ -194,84 +212,62 @@
appCtx = new CCApplicationContext(this, serverCtx, ccContext, ccConfig.getAppConfig());
appCtx.addJobLifecycleListener(datasetDirectoryService);
executor = Executors.newCachedThreadPool(appCtx.getThreadFactory());
- String className = ccConfig.appCCMainClass;
-
- IJobCapacityController jobCapacityController = DefaultJobCapacityController.INSTANCE;
- if (className != null) {
- Class<?> c = Class.forName(className);
- aep = (ICCApplicationEntryPoint) c.newInstance();
- String[] args = ccConfig.appArgs == null ? null
- : ccConfig.appArgs.toArray(new String[ccConfig.appArgs.size()]);
- aep.start(appCtx, args);
- jobCapacityController = aep.getJobCapacityController();
- }
+ aep.start(appCtx, ccConfig.getAppArgsArray());
+ IJobCapacityController jobCapacityController = aep.getJobCapacityController();
// Job manager is in charge of job lifecycle management.
try {
Constructor<?> jobManagerConstructor = this.getClass().getClassLoader()
- .loadClass(ccConfig.jobManagerClassName)
+ .loadClass(ccConfig.getJobManagerClass())
.getConstructor(CCConfig.class, ClusterControllerService.class, IJobCapacityController.class);
jobManager = (IJobManager) jobManagerConstructor.newInstance(ccConfig, this, jobCapacityController);
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException
| InvocationTargetException e) {
if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "class " + ccConfig.jobManagerClassName + " could not be used: ", e);
+ LOGGER.log(Level.WARNING, "class " + ccConfig.getJobManagerClass() + " could not be used: ", e);
}
// Falls back to the default implementation if the user-provided class name is not valid.
jobManager = new JobManager(ccConfig, this, jobCapacityController);
}
}
- private void connectNCs() throws Exception {
- Ini ini = ccConfig.getIni();
- if (ini == null || Boolean.parseBoolean(ini.get("cc", "virtual.cluster"))) {
- return;
- }
- for (String section : ini.keySet()) {
- if (!section.startsWith("nc/")) {
- continue;
+ private Map<String, Pair<String, Integer>> getNCServices() throws IOException {
+ Map<String, Pair<String, Integer>> ncMap = new TreeMap<>();
+ for (String ncId : configManager.getNodeNames()) {
+ IApplicationConfig ncConfig = configManager.getNodeEffectiveConfig(ncId);
+ if (!ncConfig.getBoolean(NCConfig.Option.VIRTUAL_NC)) {
+ ncMap.put(ncId, Pair.of(ncConfig.getString(NCConfig.Option.NCSERVICE_ADDRESS),
+ ncConfig.getInt(NCConfig.Option.NCSERVICE_PORT)));
}
- String ncid = section.substring(3);
- String address = IniUtils.getString(ini, section, "address", null);
- int port = IniUtils.getInt(ini, section, "port", 9090);
- if (address == null) {
- address = InetAddress.getLoopbackAddress().getHostAddress();
- }
- workQueue.schedule(new TriggerNCWork(this, address, port, ncid));
}
+ return ncMap;
+ }
+
+ private void connectNCs() throws IOException {
+ getNCServices().entrySet().forEach(ncService -> {
+ final TriggerNCWork triggerWork = new TriggerNCWork(ClusterControllerService.this,
+ ncService.getValue().getLeft(), ncService.getValue().getRight(), ncService.getKey());
+ workQueue.schedule(triggerWork);
+ });
}
private void terminateNCServices() throws Exception {
- Ini ini = ccConfig.getIni();
- if (ini == null || Boolean.parseBoolean(ini.get("cc", "virtual.cluster"))) {
- return;
- }
List<ShutdownNCServiceWork> shutdownNCServiceWorks = new ArrayList<>();
- for (String section : ini.keySet()) {
- if (!section.startsWith("nc/")) {
- continue;
- }
- String ncid = section.substring(3);
- String address = IniUtils.getString(ini, section, "address", null);
- int port = IniUtils.getInt(ini, section, "port", 9090);
- if (address == null) {
- address = InetAddress.getLoopbackAddress().getHostAddress();
- }
- ShutdownNCServiceWork shutdownWork = new ShutdownNCServiceWork(address, port, ncid);
+ getNCServices().entrySet().forEach(ncService -> {
+ ShutdownNCServiceWork shutdownWork = new ShutdownNCServiceWork(ncService.getValue().getLeft(),
+ ncService.getValue().getRight(), ncService.getKey());
workQueue.schedule(shutdownWork);
shutdownNCServiceWorks.add(shutdownWork);
- }
+ });
for (ShutdownNCServiceWork shutdownWork : shutdownNCServiceWorks) {
shutdownWork.sync();
}
}
private void notifyApplication() throws Exception {
- if (aep != null) {
- // Sometimes, there is no application entry point. Check hyracks-client project
- aep.startupCompleted();
- }
+ aep.startupCompleted();
}
+
public void stop(boolean terminateNCService) throws Exception {
if (terminateNCService) {
terminateNCServices();
@@ -294,9 +290,7 @@
}
private void stopApplication() throws Exception {
- if (aep != null) {
- aep.stop();
- }
+ aep.stop();
}
public ServerContext getServerContext() {
@@ -360,7 +354,7 @@
}
public NetworkAddress getDatasetDirectoryServiceInfo() {
- return new NetworkAddress(ccConfig.clientNetIpAddress, ccConfig.clientNetPort);
+ return new NetworkAddress(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort());
}
private final class ClusterControllerContext implements ICCContext {
@@ -390,6 +384,7 @@
public ClusterTopology getClusterTopology() {
return topology;
}
+
}
private class DeadNodeSweeper extends TimerTask {
@@ -458,4 +453,14 @@
public ThreadDumpRun removeThreadDumpRun(String requestKey) {
return threadDumpRunMap.remove(requestKey);
}
+
+ private static ICCApplicationEntryPoint getApplicationEntryPoint(CCConfig ccConfig)
+ throws ClassNotFoundException, IllegalAccessException, InstantiationException {
+ if (ccConfig.getAppClass() != null) {
+ Class<?> c = Class.forName(ccConfig.getAppClass());
+ return (ICCApplicationEntryPoint) c.newInstance();
+ } else {
+ return CCApplicationEntryPoint.INSTANCE;
+ }
+ }
}
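The CC-side changes above replace direct field access on CCConfig with getters resolved through the new ConfigManager, and NC service discovery now comes from each node's effective config rather than raw ini sections. A minimal sketch of the new access pattern, mirroring the test setup later in this change (the options printed are simply ones exercised here):

    import java.io.IOException;

    import org.apache.hyracks.control.common.controllers.CCConfig;
    import org.kohsuke.args4j.CmdLineException;

    public class CCConfigSketch {
        public static void main(String[] args) throws IOException, CmdLineException {
            CCConfig ccConfig = new CCConfig();
            // ini file, command line and defaults are resolved once, up front
            ccConfig.getConfigManager().processConfig();
            System.out.println(ccConfig.getClientListenAddress() + ":" + ccConfig.getClientListenPort());
            System.out.println("job history size: " + ccConfig.getJobHistorySize());
        }
    }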
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java
index 955b7f2..8400a59 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/NodeControllerState.java
@@ -282,7 +282,7 @@
public synchronized ObjectNode toSummaryJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode o = om.createObjectNode();
- o.put("node-id", ncConfig.nodeId);
+ o.put("node-id", ncConfig.getNodeId());
o.put("heap-used", heapUsedSize[(rrdPtr + RRD_SIZE - 1) % RRD_SIZE]);
o.put("system-load-average", systemLoadAverage[(rrdPtr + RRD_SIZE - 1) % RRD_SIZE]);
@@ -293,7 +293,7 @@
ObjectMapper om = new ObjectMapper();
ObjectNode o = om.createObjectNode();
- o.put("node-id", ncConfig.nodeId);
+ o.put("node-id", ncConfig.getNodeId());
if (includeConfig) {
o.put("os-name", osName);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/application/CCApplicationContext.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/application/CCApplicationContext.java
index 77b9b17..a8b03bc 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/application/CCApplicationContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/application/CCApplicationContext.java
@@ -27,9 +27,10 @@
import java.util.Map;
import java.util.Set;
-import org.apache.hyracks.api.application.IApplicationConfig;
import org.apache.hyracks.api.application.ICCApplicationContext;
import org.apache.hyracks.api.application.IClusterLifecycleListener;
+import org.apache.hyracks.api.config.IApplicationConfig;
+import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.context.ICCContext;
import org.apache.hyracks.api.exceptions.HyracksException;
import org.apache.hyracks.api.job.IJobLifecycleListener;
@@ -105,7 +106,7 @@
clusterLifecycleListeners.add(clusterLifecycleListener);
}
- public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) throws HyracksException {
+ public void notifyNodeJoin(String nodeId, Map<IOption, Object> ncConfiguration) throws HyracksException {
for (IClusterLifecycleListener l : clusterLifecycleListeners) {
l.notifyNodeJoin(nodeId, ncConfiguration);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
index 354019c..d6d8bc4 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
@@ -137,7 +137,7 @@
Map.Entry<String, NodeControllerState> entry = nodeIterator.next();
String nodeId = entry.getKey();
NodeControllerState state = entry.getValue();
- if (state.incrementLastHeartbeatDuration() >= ccConfig.maxHeartbeatLapsePeriods) {
+ if (state.incrementLastHeartbeatDuration() >= ccConfig.getHeartbeatMaxMisses()) {
deadNodes.add(nodeId);
affectedJobIds.addAll(state.getActiveJobIds());
// Removes the node from node map.
@@ -172,10 +172,7 @@
// Retrieves the IP address for a given node.
private InetAddress getIpAddress(NodeControllerState ncState) throws HyracksException {
- String ipAddress = ncState.getNCConfig().dataIPAddress;
- if (ncState.getNCConfig().dataPublicIPAddress != null) {
- ipAddress = ncState.getNCConfig().dataPublicIPAddress;
- }
+ String ipAddress = ncState.getNCConfig().getDataPublicAddress();
try {
return InetAddress.getByName(ipAddress);
} catch (UnknownHostException e) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
index 031303b..b35de3d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobManager.java
@@ -68,13 +68,13 @@
this.ccs = ccs;
this.jobCapacityController = jobCapacityController;
try {
- Constructor<?> jobQueueConstructor = this.getClass().getClassLoader().loadClass(ccConfig.jobQueueClassName)
+ Constructor<?> jobQueueConstructor = this.getClass().getClassLoader().loadClass(ccConfig.getJobQueueClass())
.getConstructor(IJobManager.class, IJobCapacityController.class);
jobQueue = (IJobQueue) jobQueueConstructor.newInstance(this, this.jobCapacityController);
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException
| InvocationTargetException e) {
if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.log(Level.WARNING, "class " + ccConfig.jobQueueClassName + " could not be used: ", e);
+ LOGGER.log(Level.WARNING, "class " + ccConfig.getJobQueueClass() + " could not be used: ", e);
}
// Falls back to the default implementation if the user-provided class name is not valid.
jobQueue = new FIFOJobQueue(this, jobCapacityController);
@@ -85,13 +85,13 @@
@Override
protected boolean removeEldestEntry(Map.Entry<JobId, JobRun> eldest) {
- return size() > ccConfig.jobHistorySize;
+ return size() > ccConfig.getJobHistorySize();
}
};
runMapHistory = new LinkedHashMap<JobId, List<Exception>>() {
private static final long serialVersionUID = 1L;
/** history size + 1 is for the case when history size = 0 */
- private int allowedSize = 100 * (ccConfig.jobHistorySize + 1);
+ private int allowedSize = 100 * (ccConfig.getJobHistorySize() + 1);
@Override
protected boolean removeEldestEntry(Map.Entry<JobId, List<Exception>> eldest) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
index 0577002..3dec959 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
@@ -27,28 +27,26 @@
import java.lang.management.OperatingSystemMXBean;
import java.lang.management.RuntimeMXBean;
import java.lang.management.ThreadMXBean;
-import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Date;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.hyracks.control.cc.NodeControllerState;
-import org.apache.hyracks.control.cc.cluster.INodeManager;
-import org.apache.hyracks.control.common.controllers.CCConfig;
-import org.apache.hyracks.control.common.utils.PidHelper;
-import org.apache.hyracks.control.common.work.IPCResponder;
-import org.apache.hyracks.control.common.work.SynchronizableWork;
-import org.kohsuke.args4j.Option;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.control.cc.NodeControllerState;
+import org.apache.hyracks.control.cc.cluster.INodeManager;
+import org.apache.hyracks.control.common.config.ConfigUtils;
+import org.apache.hyracks.control.common.controllers.CCConfig;
+import org.apache.hyracks.control.common.controllers.NCConfig;
+import org.apache.hyracks.control.common.utils.PidHelper;
+import org.apache.hyracks.control.common.work.IPCResponder;
+import org.apache.hyracks.control.common.work.SynchronizableWork;
public class GetNodeDetailsJSONWork extends SynchronizableWork {
- private static final Logger LOGGER = Logger.getLogger(GetNodeDetailsJSONWork.class.getName());
+ private static final Section[] CC_SECTIONS = { Section.CC, Section.COMMON };
+ private static final Section[] NC_SECTIONS = { Section.NC, Section.COMMON };
+
private final INodeManager nodeManager;
private final CCConfig ccConfig;
private final String nodeId;
@@ -59,7 +57,7 @@
private ObjectMapper om = new ObjectMapper();
public GetNodeDetailsJSONWork(INodeManager nodeManager, CCConfig ccConfig, String nodeId, boolean includeStats,
- boolean includeConfig, IPCResponder<String> callback) {
+ boolean includeConfig, IPCResponder<String> callback) {
this.nodeManager = nodeManager;
this.ccConfig = ccConfig;
this.nodeId = nodeId;
@@ -69,7 +67,7 @@
}
public GetNodeDetailsJSONWork(INodeManager nodeManager, CCConfig ccConfig, String nodeId, boolean includeStats,
- boolean includeConfig) {
+ boolean includeConfig) {
this(nodeManager, ccConfig, nodeId, includeStats, includeConfig, null);
}
@@ -79,14 +77,18 @@
// null nodeId is a request for CC
detail = getCCDetails();
if (includeConfig) {
- addIni(detail, ccConfig);
+ ConfigUtils.addConfigToJSON(detail, ccConfig.getAppConfig(), CC_SECTIONS);
+ detail.putPOJO("app.args", ccConfig.getAppArgs());
}
} else {
NodeControllerState ncs = nodeManager.getNodeControllerState(nodeId);
if (ncs != null) {
detail = ncs.toDetailedJSON(includeStats, includeConfig);
if (includeConfig) {
- addIni(detail, ncs.getNCConfig());
+ final NCConfig ncConfig = ncs.getNCConfig();
+ ConfigUtils.addConfigToJSON(detail, ncConfig.getConfigManager().getNodeEffectiveConfig(nodeId),
+ NC_SECTIONS);
+ detail.putPOJO("app.args", ncConfig.getAppArgs());
}
}
}
@@ -96,7 +98,7 @@
}
}
- private ObjectNode getCCDetails() {
+ private ObjectNode getCCDetails() {
ObjectNode o = om.createObjectNode();
MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
List<GarbageCollectorMXBean> gcMXBeans = ManagementFactory.getGarbageCollectorMXBeans();
@@ -151,33 +153,6 @@
return o;
}
- private static void addIni(ObjectNode o, Object configBean) {
- Map<String, Object> iniMap = new HashMap<>();
- for (Field f : configBean.getClass().getFields()) {
- Option option = f.getAnnotation(Option.class);
- if (option == null) {
- continue;
- }
- final String optionName = option.name();
- Object value = null;
- try {
- value = f.get(configBean);
- } catch (IllegalAccessException e) {
- LOGGER.log(Level.WARNING, "Unable to access ini option " + optionName, e);
- }
- if (value != null) {
- if ("--".equals(optionName)) {
- iniMap.put("app_args", value);
- } else {
- iniMap.put(optionName.substring(1).replace('-', '_'),
- "-iodevices".equals(optionName)
- ? String.valueOf(value).split(",")
- : value);
- }
- }
- }
- o.putPOJO("ini", iniMap);
- }
public ObjectNode getDetail() {
return detail;
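In place of reflecting over args4j-annotated fields, node and CC details are now serialized from the registered options via ConfigUtils.addConfigToJSON, filtered by section. A hedged sketch of the call pattern used above (assuming a CCConfig whose configuration has already been processed):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    import org.apache.hyracks.api.config.Section;
    import org.apache.hyracks.control.common.config.ConfigUtils;
    import org.apache.hyracks.control.common.controllers.CCConfig;

    public class ConfigJsonSketch {
        static ObjectNode ccConfigAsJson(CCConfig ccConfig) {
            ObjectNode detail = new ObjectMapper().createObjectNode();
            // only [cc] and [common] options are reported for the CC; NCs use their node-effective config
            ConfigUtils.addConfigToJSON(detail, ccConfig.getAppConfig(), new Section[] { Section.CC, Section.COMMON });
            detail.putPOJO("app.args", ccConfig.getAppArgs());
            return detail;
        }
    }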
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
index dc93515..e97950e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
@@ -23,6 +23,8 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.hyracks.api.config.IApplicationConfig;
+import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.cc.NodeControllerState;
import org.apache.hyracks.control.cc.cluster.INodeManager;
@@ -50,19 +52,22 @@
String id = reg.getNodeId();
IIPCHandle ncIPCHandle = ccs.getClusterIPC().getHandle(reg.getNodeControllerAddress());
CCNCFunctions.NodeRegistrationResult result;
- Map<String, String> ncConfiguration = new HashMap<>();
+ Map<IOption, Object> ncConfiguration = new HashMap<>();
try {
INodeController nodeController = new NodeControllerRemoteProxy(ncIPCHandle);
NodeControllerState state = new NodeControllerState(nodeController, reg);
INodeManager nodeManager = ccs.getNodeManager();
nodeManager.addNode(id, state);
- state.getNCConfig().toMap(ncConfiguration);
+ IApplicationConfig cfg = state.getNCConfig().getConfigManager().getNodeEffectiveConfig(id);
+ for (IOption option : cfg.getOptions()) {
+ ncConfiguration.put(option, cfg.get(option));
+ }
LOGGER.log(Level.INFO, "Registered INodeController: id = " + id);
NodeParameters params = new NodeParameters();
params.setClusterControllerInfo(ccs.getClusterControllerInfo());
params.setDistributedState(ccs.getApplicationContext().getDistributedState());
- params.setHeartbeatPeriod(ccs.getCCConfig().heartbeatPeriod);
- params.setProfileDumpPeriod(ccs.getCCConfig().profileDumpPeriod);
+ params.setHeartbeatPeriod(ccs.getCCConfig().getHeartbeatPeriod());
+ params.setProfileDumpPeriod(ccs.getCCConfig().getProfileDumpPeriod());
result = new CCNCFunctions.NodeRegistrationResult(params, null);
} catch (Exception e) {
result = new CCNCFunctions.NodeRegistrationResult(null, e);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
index a7bca25..ab526e8 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
@@ -27,7 +27,9 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.control.cc.ClusterControllerService;
+import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.hyracks.control.common.controllers.ServiceConstants.ServiceCommand;
import org.apache.hyracks.control.common.work.AbstractWork;
import org.ini4j.Ini;
@@ -79,14 +81,18 @@
/**
* Given an Ini object, serialize it to String with some enhancements.
- * @param ccini
+ * @param ccini the ini file to decorate and forward to NC
*/
- String serializeIni(Ini ccini) throws IOException {
+ private String serializeIni(Ini ccini) throws IOException {
StringWriter iniString = new StringWriter();
- ccini.store(iniString);
+ ccini.get(Section.NC.sectionName()).putIfAbsent(NCConfig.Option.CLUSTER_ADDRESS.ini(),
+ ccs.getCCConfig().getClusterPublicAddress());
+ ccini.get(Section.NC.sectionName()).putIfAbsent(NCConfig.Option.CLUSTER_PORT.ini(),
+ String.valueOf(ccs.getCCConfig().getClusterPublicPort()));
// Finally insert *this* NC's name into localnc section - this is a fixed
// entry point so that NCs can determine where all their config is.
- iniString.append("\n[localnc]\nid=").append(ncId).append("\n");
+ ccini.put(Section.LOCALNC.sectionName(), NCConfig.Option.NODE_ID.ini(), ncId);
+ ccini.store(iniString);
if (LOGGER.isLoggable(Level.FINE)) {
LOGGER.fine("Returning Ini file:\n" + iniString.toString());
}
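serializeIni now decorates the forwarded Ini through the typed option names instead of appending raw text. A rough sketch of that decoration, with placeholder values standing in for the CC's cluster public address and port:

    import java.io.IOException;
    import java.io.StringWriter;

    import org.apache.hyracks.api.config.Section;
    import org.apache.hyracks.control.common.controllers.NCConfig;
    import org.ini4j.Ini;

    public class IniDecorationSketch {
        static String decorate(Ini ccini, String ncId) throws IOException {
            // tell the NC where the CC lives (placeholder values)...
            ccini.put(Section.NC.sectionName(), NCConfig.Option.CLUSTER_ADDRESS.ini(), "10.1.0.1");
            ccini.put(Section.NC.sectionName(), NCConfig.Option.CLUSTER_PORT.ini(), "1099");
            // ...and which node it is, via the fixed [localnc] entry point
            ccini.put(Section.LOCALNC.sectionName(), NCConfig.Option.NODE_ID.ini(), ncId);
            StringWriter out = new StringWriter();
            ccini.store(out);
            return out.toString();
        }
    }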
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/test/java/org/apache/hyracks/control/cc/cluster/NodeManagerTest.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/test/java/org/apache/hyracks/control/cc/cluster/NodeManagerTest.java
index c742a4a..dde3bad 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/test/java/org/apache/hyracks/control/cc/cluster/NodeManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/test/java/org/apache/hyracks/control/cc/cluster/NodeManagerTest.java
@@ -46,8 +46,8 @@
public void testNormal() throws HyracksException {
IResourceManager resourceManager = new ResourceManager();
INodeManager nodeManager = new NodeManager(makeCCConfig(), resourceManager);
- NodeControllerState ncState1 = mockNodeControllerState(false);
- NodeControllerState ncState2 = mockNodeControllerState(false);
+ NodeControllerState ncState1 = mockNodeControllerState(NODE1, false);
+ NodeControllerState ncState2 = mockNodeControllerState(NODE2, false);
// Verifies states after adding nodes.
nodeManager.addNode(NODE1, ncState1);
@@ -71,7 +71,7 @@
public void testException() throws HyracksException {
IResourceManager resourceManager = new ResourceManager();
INodeManager nodeManager = new NodeManager(makeCCConfig(), resourceManager);
- NodeControllerState ncState1 = mockNodeControllerState(true);
+ NodeControllerState ncState1 = mockNodeControllerState(NODE1, true);
boolean invalidNetworkAddress = false;
// Verifies states after a failure during adding nodes.
@@ -106,11 +106,11 @@
private CCConfig makeCCConfig() {
CCConfig ccConfig = new CCConfig();
- ccConfig.maxHeartbeatLapsePeriods = 0;
+ ccConfig.setHeartbeatMaxMisses(0);
return ccConfig;
}
- private NodeControllerState mockNodeControllerState(boolean invalidIpAddr) {
+ private NodeControllerState mockNodeControllerState(String nodeId, boolean invalidIpAddr) {
NodeControllerState ncState = mock(NodeControllerState.class);
String ipAddr = invalidIpAddr ? "255.255.255:255" : "127.0.0.2";
NetworkAddress dataAddr = new NetworkAddress(ipAddr, 1001);
@@ -120,8 +120,8 @@
when(ncState.getDataPort()).thenReturn(dataAddr);
when(ncState.getDatasetPort()).thenReturn(resultAddr);
when(ncState.getMessagingPort()).thenReturn(msgAddr);
- NCConfig ncConfig = new NCConfig();
- ncConfig.dataIPAddress = ipAddr;
+ NCConfig ncConfig = new NCConfig(nodeId);
+ ncConfig.setDataPublicAddress(ipAddr);
when(ncState.getNCConfig()).thenReturn(ncConfig);
return ncState;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/test/java/org/apache/hyracks/control/cc/job/JobManagerTest.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/test/java/org/apache/hyracks/control/cc/job/JobManagerTest.java
index 3bb08bd..97b05e7 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/test/java/org/apache/hyracks/control/cc/job/JobManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/test/java/org/apache/hyracks/control/cc/job/JobManagerTest.java
@@ -27,6 +27,7 @@
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
@@ -48,14 +49,23 @@
import org.apache.hyracks.control.common.controllers.CCConfig;
import org.apache.hyracks.control.common.logs.LogFile;
import org.junit.Assert;
+import org.junit.Before;
import org.junit.Test;
+import org.kohsuke.args4j.CmdLineException;
import org.mockito.Mockito;
public class JobManagerTest {
+ private CCConfig ccConfig;
+
+ @Before
+ public void setup() throws IOException, CmdLineException {
+ ccConfig = new CCConfig();
+ ccConfig.getConfigManager().processConfig();
+ }
+
@Test
- public void test() throws HyracksException {
- CCConfig ccConfig = new CCConfig();
+ public void test() throws IOException, CmdLineException {
IJobCapacityController jobCapacityController = mock(IJobCapacityController.class);
IJobManager jobManager = spy(new JobManager(ccConfig, mockClusterControllerService(), jobCapacityController));
@@ -114,7 +124,7 @@
}
Assert.assertTrue(jobManager.getRunningJobs().size() == 4096);
Assert.assertTrue(jobManager.getPendingJobs().isEmpty());
- Assert.assertTrue(jobManager.getArchivedJobs().size() == ccConfig.jobHistorySize);
+ Assert.assertTrue(jobManager.getArchivedJobs().size() == ccConfig.getJobHistorySize());
// Completes deferred jobs.
for (JobRun run : deferredRuns) {
@@ -123,14 +133,13 @@
}
Assert.assertTrue(jobManager.getRunningJobs().isEmpty());
Assert.assertTrue(jobManager.getPendingJobs().isEmpty());
- Assert.assertTrue(jobManager.getArchivedJobs().size() == ccConfig.jobHistorySize);
+ Assert.assertTrue(jobManager.getArchivedJobs().size() == ccConfig.getJobHistorySize());
verify(jobManager, times(8192)).prepareComplete(any(), any(), any());
verify(jobManager, times(8192)).finalComplete(any());
}
@Test
public void testExceedMax() throws HyracksException {
- CCConfig ccConfig = new CCConfig();
IJobCapacityController jobCapacityController = mock(IJobCapacityController.class);
IJobManager jobManager = spy(new JobManager(ccConfig, mockClusterControllerService(), jobCapacityController));
boolean rejected = false;
@@ -154,7 +163,6 @@
@Test
public void testAdmitThenReject() throws HyracksException {
- CCConfig ccConfig = new CCConfig();
IJobCapacityController jobCapacityController = mock(IJobCapacityController.class);
IJobManager jobManager = spy(new JobManager(ccConfig, mockClusterControllerService(), jobCapacityController));
@@ -185,7 +193,6 @@
@Test
public void testNullJob() throws HyracksException {
- CCConfig ccConfig = new CCConfig();
IJobCapacityController jobCapacityController = mock(IJobCapacityController.class);
IJobManager jobManager = new JobManager(ccConfig, mockClusterControllerService(), jobCapacityController);
boolean invalidParameter = false;
@@ -249,7 +256,7 @@
}
Assert.assertTrue(jobManager.getPendingJobs().isEmpty());
- Assert.assertTrue(jobManager.getArchivedJobs().size() == ccConfig.jobHistorySize);
+ Assert.assertTrue(jobManager.getArchivedJobs().size() == ccConfig.getJobHistorySize());
verify(jobManager, times(0)).prepareComplete(any(), any(), any());
verify(jobManager, times(0)).finalComplete(any());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
index 08783cc..bd6960a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/pom.xml
@@ -48,7 +48,6 @@
<dependency>
<groupId>args4j</groupId>
<artifactId>args4j</artifactId>
- <version>2.0.12</version>
</dependency>
<dependency>
<groupId>org.apache.hyracks</groupId>
@@ -67,5 +66,14 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.hyracks</groupId>
+ <artifactId>hyracks-util</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-collections4</artifactId>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ApplicationContext.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ApplicationContext.java
index 06bcda3..42bc636 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ApplicationContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ApplicationContext.java
@@ -21,7 +21,7 @@
import java.io.Serializable;
import java.util.concurrent.ThreadFactory;
-import org.apache.hyracks.api.application.IApplicationConfig;
+import org.apache.hyracks.api.config.IApplicationConfig;
import org.apache.hyracks.api.application.IApplicationContext;
import org.apache.hyracks.api.job.IJobSerializerDeserializerContainer;
import org.apache.hyracks.api.job.JobSerializerDeserializerContainer;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ConfigManagerApplicationConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ConfigManagerApplicationConfig.java
new file mode 100644
index 0000000..92e90e7
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ConfigManagerApplicationConfig.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.common.application;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Set;
+import java.util.function.Predicate;
+
+import org.apache.hyracks.api.config.IApplicationConfig;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.control.common.config.ConfigManager;
+
+/**
+ * An implementation of IApplicationConfig which is backed by the Config Manager.
+ */
+public class ConfigManagerApplicationConfig implements IApplicationConfig, Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private final ConfigManager configManager;
+
+ public ConfigManagerApplicationConfig(ConfigManager configManager) {
+ this.configManager = configManager;
+ }
+
+ @Override
+ public String getString(String section, String key) {
+ return (String)get(section, key);
+ }
+
+ @Override
+ public int getInt(String section, String key) {
+ return (int)get(section, key);
+ }
+
+ @Override
+ public long getLong(String section, String key) {
+ return (long)get(section, key);
+ }
+
+ @Override
+ public Set<String> getSectionNames() {
+ return configManager.getSectionNames();
+ }
+
+ @Override
+ public Set<Section> getSections() {
+ return configManager.getSections();
+ }
+
+ @Override
+ public Set<Section> getSections(Predicate<Section> predicate) {
+ return configManager.getSections(predicate);
+ }
+
+ @Override
+ public Set<String> getKeys(String section) {
+ return configManager.getOptionNames(section);
+ }
+
+ private Object get(String section, String key) {
+ return get(configManager.lookupOption(section, key));
+ }
+
+ @Override
+ public Object getStatic(IOption option) {
+ return configManager.get(option);
+ }
+
+ @Override
+ public List<String> getNCNames() {
+ return configManager.getNodeNames();
+ }
+
+ @Override
+ public IOption lookupOption(String sectionName, String propertyName) {
+ return configManager.lookupOption(sectionName, propertyName);
+ }
+
+ @Override
+ public Set<IOption> getOptions() {
+ return configManager.getOptions();
+ }
+
+ @Override
+ public Set<IOption> getOptions(Section section) {
+ return configManager.getOptions(section);
+ }
+
+ @Override
+ public IApplicationConfig getNCEffectiveConfig(String nodeId) {
+ return configManager.getNodeEffectiveConfig(nodeId);
+ }
+
+ public ConfigManager getConfigManager() {
+ return configManager;
+ }
+}
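A rough sketch of how a ConfigManager-backed IApplicationConfig is obtained and queried, assuming NCConfig.Option is an IOption enum as its usage elsewhere in this change suggests:

    import java.io.IOException;

    import org.apache.hyracks.api.config.IApplicationConfig;
    import org.apache.hyracks.control.common.config.ConfigManager;
    import org.apache.hyracks.control.common.controllers.NCConfig;
    import org.kohsuke.args4j.CmdLineException;

    public class ConfigManagerSketch {
        public static void main(String[] args) throws IOException, CmdLineException {
            ConfigManager configManager = new ConfigManager(args);
            configManager.register(NCConfig.Option.class); // registers every constant of the option enum
            configManager.processConfig();                 // ini pointers, ini file, command line, defaults
            // cluster-wide view...
            IApplicationConfig appConfig = configManager.getAppConfig();
            System.out.println("sections: " + appConfig.getSectionNames());
            // ...and the effective view for one node, with node-specific overrides layered on top
            IApplicationConfig nc1 = configManager.getNodeEffectiveConfig("nc1");
            System.out.println("nc1 virtual: " + nc1.getBoolean(NCConfig.Option.VIRTUAL_NC));
        }
    }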
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/IniApplicationConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/IniApplicationConfig.java
deleted file mode 100644
index 53db11c..0000000
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/IniApplicationConfig.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hyracks.control.common.application;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hyracks.api.application.IApplicationConfig;
-import org.apache.hyracks.control.common.controllers.IniUtils;
-import org.ini4j.Ini;
-import org.ini4j.Profile.Section;
-
-/**
- * An implementation of IApplicationConfig which is backed by Ini4j.
- */
-public class IniApplicationConfig implements IApplicationConfig {
- private final Ini ini;
-
- public IniApplicationConfig(Ini ini) {
- if (ini != null) {
- this.ini = ini;
- } else {
- this.ini = new Ini();
- }
- }
-
- @Override
- public String getString(String section, String key) {
- return IniUtils.getString(ini, section, key, null);
- }
-
- @Override
- public String getString(String section, String key, String defaultValue) {
- return IniUtils.getString(ini, section, key, defaultValue);
- }
-
- @Override
- public String[] getStringArray(String section, String key) {
- return IniUtils.getStringArray(ini, section, key);
- }
-
- @Override
- public int getInt(String section, String key) {
- return IniUtils.getInt(ini, section, key, 0);
- }
-
- @Override
- public int getInt(String section, String key, int defaultValue) {
- return IniUtils.getInt(ini, section, key, defaultValue);
- }
-
- @Override
- public long getLong(String section, String key) {
- return IniUtils.getLong(ini, section, key, 0);
- }
-
- @Override
- public long getLong(String section, String key, long defaultValue) {
- return IniUtils.getLong(ini, section, key, defaultValue);
- }
-
- @Override
- public Set<String> getSections() {
- return ini.keySet();
- }
-
- @Override
- public Set<String> getKeys(String section) {
- return ini.get(section).keySet();
- }
-
- @Override
- public List<Set<Map.Entry<String, String>>> getMultiSections(String section) {
- List<Set<Map.Entry<String, String>>> list = new ArrayList<>();
- List<Section> secs = getMulti(section);
- if (secs != null) {
- for (Section sec : secs) {
- list.add(sec.entrySet());
- }
- }
- return list;
- }
-
- private List<Section> getMulti(String section) {
- return ini.getAll(section);
- }
-}
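The deletion above is the other half of the move from string-keyed lookups with call-site defaults to typed IOption lookups whose defaults live with the option definition. A small before/after sketch (the old key and default are the ones the removed connectNCs code used):

    import org.apache.hyracks.api.config.IApplicationConfig;
    import org.apache.hyracks.control.common.controllers.NCConfig;

    public class OptionLookupSketch {
        static int ncServicePort(IApplicationConfig ncConfig) {
            // previously: getInt("nc/<id>", "port", 9090) -- string keys, default chosen at the call site
            // now: the IOption carries its section, ini name, type and default value
            return ncConfig.getInt(NCConfig.Option.NCSERVICE_PORT);
        }
    }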
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/Args4jArgument.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/Args4jArgument.java
new file mode 100644
index 0000000..de9b543
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/Args4jArgument.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.common.config;
+
+import java.lang.annotation.Annotation;
+
+import org.kohsuke.args4j.Argument;
+import org.kohsuke.args4j.spi.OptionHandler;
+import org.kohsuke.args4j.spi.StringOptionHandler;
+
+@SuppressWarnings("ClassExplicitlyAnnotation")
+public class Args4jArgument implements Argument {
+ @Override
+ public String usage() {
+ return "";
+ }
+
+ @Override
+ public String metaVar() {
+ return "";
+ }
+
+ @Override
+ public boolean required() {
+ return false;
+ }
+
+ @Override
+ public boolean hidden() {
+ return false;
+ }
+
+ @Override
+ public Class<? extends OptionHandler> handler() {
+ return StringOptionHandler.class;
+ }
+
+ @Override
+ public int index() {
+ return 0;
+ }
+
+ @Override
+ public boolean multiValued() {
+ return true;
+ }
+
+ @Override
+ public Class<? extends Annotation> annotationType() {
+ return Argument.class;
+ }
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/Args4jOption.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/Args4jOption.java
new file mode 100644
index 0000000..c904d0b
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/Args4jOption.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.common.config;
+
+import java.lang.annotation.Annotation;
+
+import org.apache.hyracks.api.config.IOption;
+import org.kohsuke.args4j.Option;
+import org.kohsuke.args4j.spi.ExplicitBooleanOptionHandler;
+import org.kohsuke.args4j.spi.IntOptionHandler;
+import org.kohsuke.args4j.spi.OptionHandler;
+import org.kohsuke.args4j.spi.StringOptionHandler;
+
+@SuppressWarnings("ClassExplicitlyAnnotation")
+class Args4jOption implements Option {
+ private final IOption option;
+ private final ConfigManager configManager;
+ private final Class targetType;
+
+ Args4jOption(IOption option, ConfigManager configManager, Class targetType) {
+ this.option = option;
+ this.targetType = targetType;
+ this.configManager = configManager;
+ }
+
+ @Override
+ public String name() {
+ return option.cmdline();
+ }
+
+ @Override
+ public String[] aliases() {
+ return new String[0];
+ }
+
+ @Override
+ public String usage() {
+ return configManager.getUsage(option);
+ }
+
+ @Override
+ public String metaVar() {
+ return "";
+ }
+
+ @Override
+ public boolean required() {
+ return false;
+ }
+
+ @Override
+ public boolean help() {
+ return false;
+ }
+
+ @Override
+ public boolean hidden() {
+ return option.hidden();
+ }
+
+ @Override
+ public Class<? extends OptionHandler> handler() {
+ if (targetType.equals(Boolean.class)) {
+ return ExplicitBooleanOptionHandler.class;
+ } else if (targetType.equals(Integer.class)) {
+ return IntOptionHandler.class;
+ } else {
+ return StringOptionHandler.class;
+ }
+ }
+
+ @Override
+ public String[] depends() {
+ return new String[0];
+ }
+
+ @Override
+ public String[] forbids() {
+ return new String[0];
+ }
+
+ @Override
+ public Class<? extends Annotation> annotationType() {
+ return Option.class;
+ }
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/Args4jSetter.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/Args4jSetter.java
new file mode 100644
index 0000000..1367ef0
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/Args4jSetter.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.common.config;
+
+import java.lang.reflect.AnnotatedElement;
+import java.util.function.BiConsumer;
+import java.util.function.Consumer;
+
+import org.apache.hyracks.api.config.IOption;
+import org.kohsuke.args4j.CmdLineException;
+import org.kohsuke.args4j.spi.FieldSetter;
+import org.kohsuke.args4j.spi.Setter;
+
+class Args4jSetter implements Setter {
+ private final IOption option;
+ private BiConsumer<IOption, Object> consumer;
+ private final boolean multiValued;
+ private final Class type;
+
+ Args4jSetter(IOption option, BiConsumer<IOption, Object> consumer, boolean multiValued) {
+ this.option = option;
+ this.consumer = consumer;
+ this.multiValued = multiValued;
+ this.type = option.type().targetType();
+ }
+
+ Args4jSetter(Consumer<Object> consumer, boolean multiValued, Class type) {
+ this.option = null;
+ this.consumer = (o, value) -> consumer.accept(value);
+ this.multiValued = multiValued;
+ this.type = type;
+ }
+
+ @Override
+ public void addValue(Object value) throws CmdLineException {
+ consumer.accept(option, value);
+ }
+
+ @Override
+ public Class getType() {
+ return type;
+ }
+
+ @Override
+ public boolean isMultiValued() {
+ return multiValued;
+ }
+
+ @Override
+ public FieldSetter asFieldSetter() {
+ return null;
+ }
+
+ @Override
+ public AnnotatedElement asAnnotatedElement() {
+ return null;
+ }
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
new file mode 100644
index 0000000..bfe759e
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
@@ -0,0 +1,549 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.common.config;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.function.BiConsumer;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.function.Supplier;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.apache.commons.collections4.map.CompositeMap;
+import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;
+import org.apache.hyracks.api.config.IApplicationConfig;
+import org.apache.hyracks.api.config.IConfigManager;
+import org.apache.hyracks.api.config.IConfigurator;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.control.common.application.ConfigManagerApplicationConfig;
+import org.ini4j.Ini;
+import org.ini4j.Profile;
+import org.kohsuke.args4j.CmdLineException;
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+import org.kohsuke.args4j.OptionHandlerFilter;
+
+public class ConfigManager implements IConfigManager, Serializable {
+
+ private static final long serialVersionUID = 1L;
+ private static final Logger LOGGER = Logger.getLogger(ConfigManager.class.getName());
+
+ private HashSet<IOption> registeredOptions = new HashSet<>();
+ private HashMap<IOption, Object> definedMap = new HashMap<>();
+ private HashMap<IOption, Object> defaultMap = new HashMap<>();
+ private CompositeMap<IOption, Object> configurationMap = new CompositeMap<>(definedMap, defaultMap,
+ new NoOpMapMutator());
+ private EnumMap<Section, Map<String, IOption>> sectionMap = new EnumMap<>(Section.class);
+ private TreeMap<String, Map<IOption, Object>> nodeSpecificMap = new TreeMap<>();
+ private transient ArrayListValuedHashMap<IOption, IConfigSetter> optionSetters = new ArrayListValuedHashMap<>();
+ private final String[] args;
+ private ConfigManagerApplicationConfig appConfig = new ConfigManagerApplicationConfig(this);
+ private Set<String> allSections = new HashSet<>();
+ private transient Collection<Consumer<List<String>>> argListeners = new ArrayList<>();
+ private transient Collection<IOption> iniPointerOptions = new ArrayList<>();
+ private transient Collection<Section> cmdLineSections = new ArrayList<>();
+ private transient OptionHandlerFilter usageFilter;
+ private transient SortedMap<Integer, List<IConfigurator>> configurators = new TreeMap<>();
+ private boolean configured;
+
+ public ConfigManager() {
+ this(null);
+ }
+
+ public ConfigManager(String[] args) {
+ this.args = args;
+ for (Section section : Section.values()) {
+ allSections.add(section.sectionName());
+ }
+ addConfigurator(PARSE_INI_POINTERS_METRIC, this::extractIniPointersFromCommandLine);
+ addConfigurator(PARSE_INI_METRIC, this::parseIni);
+ addConfigurator(PARSE_COMMAND_LINE_METRIC, this::processCommandLine);
+ addConfigurator(APPLY_DEFAULTS_METRIC, this::applyDefaults);
+ }
+
+ @Override
+ public void addConfigurator(int metric, IConfigurator configurator) {
+ configurators.computeIfAbsent(metric, metric1 -> new ArrayList<>()).add(configurator);
+ }
+
+ @Override
+ public void addIniParamOptions(IOption... options) {
+ Stream.of(options).forEach(iniPointerOptions::add);
+ }
+
+ @Override
+ public void addCmdLineSections(Section... sections) {
+ Stream.of(sections).forEach(cmdLineSections::add);
+ }
+
+ @Override
+ public void setUsageFilter(OptionHandlerFilter usageFilter) {
+ this.usageFilter = usageFilter;
+ }
+
+ @Override
+ public void register(IOption... options) {
+ for (IOption option : options) {
+ if (option.section() == Section.VIRTUAL || registeredOptions.contains(option)) {
+ continue;
+ }
+ if (configured) {
+ throw new IllegalStateException("configuration already processed");
+ }
+ LOGGER.fine("registering option: " + option.toIniString());
+ Map<String, IOption> optionMap = sectionMap.computeIfAbsent(option.section(), section -> new HashMap<>());
+ IOption prev = optionMap.put(option.ini(), option);
+ if (prev != null) {
+ if (prev != option) {
+ throw new IllegalStateException("An option cannot be defined multiple times: "
+ + option.toIniString() + ": " + Arrays.asList(option.getClass(), prev.getClass()));
+ }
+ } else {
+ registeredOptions.add(option);
+ optionSetters.put(option, (node, value, isDefault) -> correctedMap(node, isDefault).put(option, value));
+ if (LOGGER.isLoggable(Level.FINE)) {
+ optionSetters.put(option, (node, value, isDefault) -> LOGGER
+ .fine((isDefault ? "defaulting" : "setting ") + option.toIniString() + " to " + value));
+ }
+ }
+ }
+ }
+
+ private Map<IOption, Object> correctedMap(String node, boolean isDefault) {
+ return node == null ? (isDefault ? defaultMap : definedMap)
+ : nodeSpecificMap.computeIfAbsent(node, this::createNodeSpecificMap);
+ }
+
+ public void registerVirtualNode(String nodeId) {
+ LOGGER.fine("registerVirtualNode: " + nodeId);
+ nodeSpecificMap.computeIfAbsent(nodeId, this::createNodeSpecificMap);
+ }
+
+ private Map<IOption, Object> createNodeSpecificMap(String nodeId) {
+ LOGGER.fine("createNodeSpecificMap: " + nodeId);
+ return new HashMap<>();
+ }
+
+ @Override
+ @SafeVarargs
+ public final void register(final Class<? extends IOption>... optionClasses) {
+ for (Class<? extends IOption> optionClass : optionClasses) {
+ register(optionClass.getEnumConstants());
+ }
+ }
+
+ public IOption lookupOption(String section, String key) {
+ Map<String, IOption> map = getSectionOptionMap(Section.parseSectionName(section));
+ return map == null ? null : map.get(key);
+ }
+
+ public void processConfig()
+ throws CmdLineException, IOException {
+ if (!configured) {
+ for (List<IConfigurator> configuratorList : configurators.values()) {
+ for (IConfigurator configurator : configuratorList) {
+ configurator.run();
+ }
+ }
+ configured = true;
+ }
+ }
+
+ private void processCommandLine() throws CmdLineException {
+ List<String> appArgs = processCommandLine(cmdLineSections, usageFilter, this::cmdLineSet);
+ // now propagate the app args to the listeners...
+ argListeners.forEach(l -> l.accept(appArgs));
+ }
+
+ private void extractIniPointersFromCommandLine() throws CmdLineException {
+ Map<IOption, Object> cmdLineOptions = new HashMap<>();
+ processCommandLine(cmdLineSections, usageFilter, cmdLineOptions::put);
+ for (IOption option : iniPointerOptions) {
+ if (cmdLineOptions.containsKey(option)) {
+ set(option, cmdLineOptions.get(option));
+ }
+ }
+ }
+
+ private void cmdLineSet(IOption option, Object value) {
+ invokeSetters(option, option.type().parse(String.valueOf(value)), null);
+ }
+
+ private void invokeSetters(IOption option, Object value, String nodeId) {
+ optionSetters.get(option).forEach(setter -> setter.set(nodeId, value, false));
+ }
+
+ @SuppressWarnings({ "squid:S106", "squid:S1147" }) // use of System.err, System.exit()
+ private List<String> processCommandLine(Collection<Section> sections, OptionHandlerFilter usageFilter,
+ BiConsumer<IOption, Object> setAction)
+ throws CmdLineException {
+ final Args4jBean bean = new Args4jBean();
+ CmdLineParser cmdLineParser = new CmdLineParser(bean);
+ final List<String> appArgs = new ArrayList<>();
+ List<IOption> commandLineOptions = new ArrayList<>();
+ for (Map.Entry<Section, Map<String, IOption>> sectionMapEntry : sectionMap.entrySet()) {
+ if (!sections.contains(sectionMapEntry.getKey())) {
+ continue;
+ }
+ for (IOption option : sectionMapEntry.getValue().values()) {
+ if (option.section() != Section.VIRTUAL) {
+ commandLineOptions.add(option);
+ }
+ }
+ }
+ commandLineOptions.sort(Comparator.comparing(IOption::cmdline));
+
+ commandLineOptions.forEach(option -> cmdLineParser.addOption(new Args4jSetter(option, setAction, false),
+ new Args4jOption(option, this, option.type().targetType())));
+
+ if (!argListeners.isEmpty()) {
+ cmdLineParser.addArgument(new Args4jSetter(o -> appArgs.add(String.valueOf(o)), true, String.class),
+ new Args4jArgument());
+ }
+ LOGGER.fine("parsing cmdline: " + Arrays.toString(args));
+ try {
+ if (args == null || args.length == 0) {
+ LOGGER.info("no command line args supplied");
+ return appArgs;
+ }
+ cmdLineParser.parseArgument(args);
+ if (bean.help) {
+ ConfigUtils.printUsage(cmdLineParser, usageFilter, System.err);
+ System.exit(0);
+ }
+ } catch (CmdLineException e) {
+ if (bean.help) {
+ ConfigUtils.printUsage(cmdLineParser, usageFilter, System.err);
+ System.exit(0);
+ } else {
+ ConfigUtils.printUsage(e, usageFilter, System.err);
+ throw e;
+ }
+ }
+ return appArgs;
+ }
+
+ private void parseIni() throws IOException {
+ Ini ini = null;
+ for (IOption option : iniPointerOptions) {
+ Object pointer = get(option);
+ if (pointer instanceof String) {
+ ini = ConfigUtils.loadINIFile((String)pointer);
+ } else if (pointer instanceof URL) {
+ ini = ConfigUtils.loadINIFile((URL)pointer);
+ } else if (pointer != null) {
+ throw new IllegalArgumentException("config file pointer options must be of type String (for file) or " +
+ "URL, instead of " + option.type().targetType());
+ }
+ }
+ if (ini == null) {
+ LOGGER.info("no INI file specified; skipping parsing");
+ return;
+ }
+ LOGGER.info("parsing INI file: " + ini);
+ for (Profile.Section section : ini.values()) {
+ allSections.add(section.getName());
+ final Section rootSection = Section
+ .parseSectionName(section.getParent() == null ? section.getName() : section.getParent().getName());
+ String node;
+ if (rootSection == Section.EXTENSION) {
+ parseExtensionIniSection(section);
+ continue;
+ } else if (rootSection == Section.NC) {
+ node = section.getName().equals(section.getSimpleName()) ? null : section.getSimpleName();
+ } else if (Section.parseSectionName(section.getName()) != null) {
+ node = null;
+ } else {
+ throw new HyracksException("Unknown section in ini: " + section.getName());
+ }
+ Map<String, IOption> optionMap = getSectionOptionMap(rootSection);
+ for (Map.Entry<String, String> iniOption : section.entrySet()) {
+ String name = iniOption.getKey();
+ final IOption option = optionMap == null ? null : optionMap.get(name);
+ if (option == null) {
+ handleUnknownOption(section, name);
+ return;
+ }
+ final String value = iniOption.getValue();
+ LOGGER.fine("setting " + option.toIniString() + " to " + value);
+ final Object parsed = option.type().parse(value);
+ invokeSetters(option, parsed, node);
+ }
+ }
+ }
+
+ private void parseExtensionIniSection(Profile.Section section) {
+ // TODO(mblow): parse extensions
+ }
+
+ private void handleUnknownOption(Profile.Section section, String name) throws HyracksException {
+ Set<String> matches = new HashSet<>();
+ for (IOption registeredOption : registeredOptions) {
+ if (registeredOption.ini().equals(name)) {
+ matches.add(registeredOption.section().sectionName());
+ }
+ }
+ if (!matches.isEmpty()) {
+ throw new HyracksException(
+ "Section mismatch for [" + section.getName() + "] " + name + ", expected section(s) " + matches);
+ } else {
+ throw new HyracksException("Unknown option in ini: [" + section.getName() + "] " + name);
+ }
+ }
+
+ private void applyDefaults() {
+ LOGGER.fine("applying defaults");
+ for (Map.Entry<Section, Map<String, IOption>> entry : sectionMap.entrySet()) {
+ if (entry.getKey() == Section.NC) {
+ entry.getValue().values().forEach(option -> getNodeNames()
+ .forEach(node -> getOrDefault(getNodeEffectiveMap(node), option, node)));
+ for (Map.Entry<String, Map<IOption, Object>> nodeMap : nodeSpecificMap.entrySet()) {
+ entry.getValue().values()
+ .forEach(option -> getOrDefault(
+ new CompositeMap<>(nodeMap.getValue(), definedMap, new NoOpMapMutator()), option,
+ nodeMap.getKey()));
+ }
+ // also push the defaults to the shared map; if the CC requests NC properties, they should receive the
+ // defaults -- TODO (mblow): seems lame, should log warning on access
+ }
+ entry.getValue().values().forEach(option -> getOrDefault(configurationMap, option, null));
+ }
+ }
+
+ private Object getOrDefault(Map<IOption, Object> map, IOption option, String nodeId) {
+ if (map.containsKey(option)) {
+ return map.get(option);
+ } else {
+ Object value = resolveDefault(option, new ConfigManagerApplicationConfig(this) {
+ @Override
+ public Object getStatic(IOption option) {
+ return getOrDefault(map, option, nodeId);
+ }
+ });
+ if (value != null && optionSetters != null) {
+ optionSetters.get(option).forEach(setter -> setter.set(nodeId, value, true));
+ }
+ return value;
+ }
+ }
+
+ public Object resolveDefault(IOption option, IApplicationConfig applicationConfig) {
+ final Object value = option.defaultValue();
+ if (value instanceof IOption) {
+ return applicationConfig.get((IOption) value);
+ } else if (value instanceof Supplier) {
+ //noinspection unchecked
+ return ((Supplier<?>) value).get();
+ } else if (value instanceof Function) {
+ //noinspection unchecked
+ return ((Function<IApplicationConfig, ?>) value).apply(applicationConfig);
+ } else {
+ return value;
+ }
+ }
+
+ @Override
+ public Set<Section> getSections(Predicate<Section> predicate) {
+ return Arrays.stream(Section.values()).filter(predicate).collect(Collectors.toSet());
+ }
+
+ @Override
+ public Set<Section> getSections() {
+ return getSections(section -> true);
+ }
+
+ public Set<String> getSectionNames() {
+ return Collections.unmodifiableSet(allSections);
+ }
+
+ public Set<String> getOptionNames(String sectionName) {
+ Set<String> optionNames = new HashSet<>();
+ Section section = Section.parseSectionName(sectionName);
+ for (IOption option : getSectionOptionMap(section).values()) {
+ optionNames.add(option.ini());
+ }
+ return optionNames;
+ }
+
+ @Override
+ public Set<IOption> getOptions(Section section) {
+ return getSectionOptionMap(section).values().stream().collect(Collectors.toSet());
+ }
+
+ private Map<String, IOption> getSectionOptionMap(Section section) {
+ final Map<String, IOption> map = sectionMap.get(section);
+ return map != null ? map : Collections.emptyMap();
+ }
+
+ public List<String> getNodeNames() {
+ return Collections.unmodifiableList(new ArrayList<>(nodeSpecificMap.keySet()));
+ }
+
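+ // Node-scoped view of the configuration: node-specific values shadow cluster-wide values, and defaults
+ // are computed in the context of the node-specific map on first access.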
+ public IApplicationConfig getNodeEffectiveConfig(String nodeId) {
+ final Map<IOption, Object> nodeMap = nodeSpecificMap.computeIfAbsent(nodeId, this::createNodeSpecificMap);
+ Map<IOption, Object> nodeEffectiveMap = getNodeEffectiveMap(nodeId);
+ return new ConfigManagerApplicationConfig(this) {
+ @Override
+ public Object getStatic(IOption option) {
+ if (!nodeEffectiveMap.containsKey(option)) {
+ // we need to calculate the default in the context of the node-specific map...
+ nodeMap.put(option, getOrDefault(nodeEffectiveMap, option, nodeId));
+ }
+ return nodeEffectiveMap.get(option);
+ }
+ };
+ }
+
+ private CompositeMap<IOption, Object> getNodeEffectiveMap(String nodeId) {
+ return new CompositeMap<>(nodeSpecificMap.get(nodeId), definedMap, new NoOpMapMutator());
+ }
+
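+ // Serializes the configuration back to ini form; node-specific overrides are emitted under
+ // "nc/<node id>" subsections.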
+ public Ini toIni(boolean includeDefaults) {
+ Ini ini = new Ini();
+ for (Map.Entry<IOption, Object> entry : (includeDefaults ? configurationMap : definedMap).entrySet()) {
+ if (entry.getValue() != null) {
+ final IOption option = entry.getKey();
+ ini.add(option.section().sectionName(), option.ini(), option.type().serializeToIni(entry.getValue()));
+ }
+ }
+ for (Map.Entry<String, Map<IOption, Object>> nodeMapEntry : nodeSpecificMap.entrySet()) {
+ String section = Section.NC.sectionName() + "/" + nodeMapEntry.getKey();
+ for (Map.Entry<IOption, Object> entry : nodeMapEntry.getValue().entrySet()) {
+ if (entry.getValue() != null) {
+ final IOption option = entry.getKey();
+ ini.add(section, option.ini(), option.type().serializeToIni(entry.getValue()));
+ }
+ }
+ }
+ return ini;
+ }
+
+ public void set(IOption option, Object value) {
+ set(null, option, value);
+ }
+
+ public void set(String nodeId, IOption option, Object value) {
+ invokeSetters(option, value, nodeId);
+ }
+
+ public Object get(IOption option) {
+ if (!registeredOptions.contains(option)) {
+ throw new IllegalStateException("Option not registered with ConfigManager: " + option.toIniString() + "("
+ + option.getClass() + "." + option + ")");
+ } else if (option.section() == Section.NC) {
+ LOGGER.warning("NC option " + option.toIniString() + " being accessed outside of NC-scoped configuration.");
+ }
+ return getOrDefault(configurationMap, option, null);
+ }
+
+ public Set<IOption> getOptions() {
+ return Collections.unmodifiableSet(registeredOptions);
+ }
+
+ @Override
+ public IApplicationConfig getAppConfig() {
+ return appConfig;
+ }
+
+ public void registerArgsListener(Consumer<List<String>> argListener) {
+ argListeners.add(argListener);
+ }
+
+ String getUsage(IOption option) {
+ final String description = option.description();
+ StringBuilder usage = new StringBuilder();
+ if (description != null && !"".equals(description)) {
+ usage.append(description).append(" ");
+ } else {
+ LOGGER.warning("missing description for option: "
+ + option.getClass().getName().substring(option.getClass().getName().lastIndexOf(".") + 1) + "."
+ + option.name());
+ }
+ usage.append("(default: ");
+ usage.append(defaultTextForUsage(option, IOption::cmdline));
+ usage.append(")");
+ return usage.toString();
+ }
+
+ public String defaultTextForUsage(IOption option, Function<IOption, String> optionPrinter) {
+ StringBuilder buf = new StringBuilder();
+ String override = option.usageDefaultOverride(appConfig, optionPrinter);
+ if (override != null) {
+ buf.append(override);
+ } else {
+ final Object value = option.defaultValue();
+ if (value instanceof IOption) {
+ buf.append("same as ").append(optionPrinter.apply((IOption) value));
+ } else if (value instanceof Function) {
+ // TODO(mblow): defer usage calculation to enable evaluation of function
+ buf.append("<function>");
+ } else {
+ buf.append(option.type().serializeToString(resolveDefault(option, appConfig)));
+ }
+ // TODO(mblow): defer usage calculation to enable inclusion of evaluated actual default
+ }
+ return buf.toString();
+ }
+
+ private static class NoOpMapMutator implements CompositeMap.MapMutator<IOption, Object> {
+ @Override
+ public Object put(CompositeMap<IOption, Object> compositeMap, Map<IOption, Object>[] maps, IOption iOption,
+ Object o) {
+ throw new UnsupportedOperationException("mutations are not allowed");
+ }
+
+ @Override
+ public void putAll(CompositeMap<IOption, Object> compositeMap, Map<IOption, Object>[] maps,
+ Map<? extends IOption, ?> map) {
+ throw new UnsupportedOperationException("mutations are not allowed");
+ }
+
+ @Override
+ public void resolveCollision(CompositeMap<IOption, Object> compositeMap, Map<IOption, Object> map,
+ Map<IOption, Object> map1, Collection<IOption> collection) {
+ // no-op
+ }
+ }
+
+ private static class Args4jBean {
+ @Option(name = "-help", help = true)
+ boolean help;
+ }
+}
\ No newline at end of file
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigUtils.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigUtils.java
new file mode 100644
index 0000000..adf1774
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigUtils.java
@@ -0,0 +1,180 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.common.config;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.PrintStream;
+import java.lang.reflect.Field;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.function.Predicate;
+
+import org.apache.hyracks.api.config.IApplicationConfig;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.control.common.controllers.ControllerConfig;
+import org.ini4j.Ini;
+import org.kohsuke.args4j.CmdLineException;
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.OptionHandlerFilter;
+
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/**
+ * Some utility functions for reading Ini4j objects with default values.
+ * For all getXxx() methods: if the 'section' contains a slash, and the 'key'
+ * is not found in that section, we will search for the key in the section named
+ * by stripping the leaf of the section name (final slash and anything following).
+ * e.g. getInt(ini, "nc/red", "dir", 0) will first look for the key "dir" in
+ * the section "nc/red", but if it is not found, will look in the section "nc".
+ */
+public class ConfigUtils {
+ private static final int USAGE_WIDTH = 120;
+
+ private ConfigUtils() {
+ }
+
+ private static <T> T getIniValue(Ini ini, String section, String key, T defaultValue, Class<T> clazz) {
+ T value;
+ while (true) {
+ value = ini.get(section, key, clazz);
+ if (value == null) {
+ int idx = section.lastIndexOf('/');
+ if (idx > -1) {
+ section = section.substring(0, idx);
+ continue;
+ }
+ }
+ return (value != null) ? value : defaultValue;
+ }
+ }
+
+ public static String getString(Ini ini, String section, String key, String defaultValue) {
+ return getIniValue(ini, section, key, defaultValue, String.class);
+ }
+
+ public static int getInt(Ini ini, String section, String key, int defaultValue) {
+ return getIniValue(ini, section, key, defaultValue, Integer.class);
+ }
+
+ public static long getLong(Ini ini, String section, String key, long defaultValue) {
+ return getIniValue(ini, section, key, defaultValue, Long.class);
+ }
+
+ public static Ini loadINIFile(String configFile) throws IOException {
+ Ini ini = new Ini();
+ File conffile = new File(configFile);
+ if (!conffile.exists()) {
+ throw new FileNotFoundException(configFile);
+ }
+ ini.load(conffile);
+ return ini;
+ }
+
+ public static Ini loadINIFile(URL configURL) throws IOException {
+ Ini ini = new Ini();
+ ini.load(configURL);
+ return ini;
+ }
+
+ public static Field[] getFields(final Class beanClass, Predicate<Field> predicate) {
+ List<Field> fields = new ArrayList<>();
+ for (Class clazz = beanClass; clazz != null && clazz.getClassLoader() != null
+ && clazz.getClassLoader().getParent() != null; clazz = clazz.getSuperclass()) {
+ for (Field f : clazz.getDeclaredFields()) {
+ if (predicate.test(f)) {
+ fields.add(f);
+ }
+ }
+ }
+ return fields.toArray(new Field[fields.size()]);
+ }
+
+ public static void printUsage(CmdLineParser parser, OptionHandlerFilter filter, PrintStream out) {
+ parser.getProperties().withUsageWidth(USAGE_WIDTH);
+ parser.printUsage(new OutputStreamWriter(out), null, filter);
+ }
+
+ public static void printUsage(CmdLineException e, OptionHandlerFilter filter, PrintStream out) {
+ out.println("ERROR: " + e.getMessage());
+ printUsage(e.getParser(), filter, out);
+ }
+
+ private static String getOptionValue(String[] args, String optionName) {
+ for (int i = 0; i < (args.length - 1); i++) {
+ if (args[i].equals(optionName)) {
+ return args[i + 1];
+ }
+ }
+ return null;
+ }
+
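+ // Resolves an option's value from the raw command line, falling back to the ini file referenced by the
+ // CONFIG_FILE / CONFIG_FILE_URL options when the option is not given on the command line.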
+ public static String getOptionValue(String[] args, IOption option) throws IOException {
+ String value = getOptionValue(args, option.cmdline());
+ if (value == null) {
+ Ini iniFile = null;
+ String configFile = getOptionValue(args, ControllerConfig.Option.CONFIG_FILE.cmdline());
+ String configFileUrl = getOptionValue(args, ControllerConfig.Option.CONFIG_FILE_URL.cmdline());
+ if (configFile != null) {
+ iniFile = loadINIFile(configFile);
+ } else if (configFileUrl != null) {
+ iniFile = loadINIFile(new URL(configFileUrl));
+ }
+ if (iniFile != null) {
+ value = iniFile.get(option.section().sectionName(), option.ini());
+ }
+ }
+ return value;
+ }
+
+ public static String getString(Ini ini, org.apache.hyracks.api.config.Section section,
+ IOption option, String defaultValue) {
+ return getString(ini, section.sectionName(), option.ini(), defaultValue);
+ }
+
+ public static void addConfigToJSON(ObjectNode o, IApplicationConfig cfg,
+ org.apache.hyracks.api.config.Section... sections) {
+ ArrayNode configArray = o.putArray("config");
+ for (org.apache.hyracks.api.config.Section section : cfg.getSections(Arrays.asList(sections)::contains)) {
+ ObjectNode sectionNode = configArray.addObject();
+ Map<String, Object> sectionConfig = getSectionOptionsForJSON(cfg, section, option -> true);
+ sectionNode.put("section", section.sectionName())
+ .putPOJO("properties", sectionConfig);
+ }
+ }
+
+ public static Map<String, Object> getSectionOptionsForJSON(IApplicationConfig cfg,
+ org.apache.hyracks.api.config.Section section,
+ Predicate<IOption> selector) {
+ Map<String, Object> sectionConfig = new TreeMap<>();
+ for (IOption option : cfg.getOptions(section)) {
+ if (selector.test(option)) {
+ sectionConfig.put(option.ini(), option.type().serializeToJSON(cfg.get(option)));
+ }
+ }
+ return sectionConfig;
+ }
+}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertyInterpreter.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/IConfigSetter.java
similarity index 71%
rename from asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertyInterpreter.java
rename to hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/IConfigSetter.java
index 36f5716..2234cca 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertyInterpreter.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/IConfigSetter.java
@@ -16,8 +16,15 @@
* specific language governing permissions and limitations
* under the License.
*/
-package org.apache.asterix.common.config;
+package org.apache.hyracks.control.common.config;
-public interface IPropertyInterpreter<T> {
- public T interpret(String s) throws IllegalArgumentException;
+@FunctionalInterface
+public interface IConfigSetter {
+ void set(String nodeId, Object value, boolean isDefault) throws SetException;
+
+ class SetException extends RuntimeException {
+ public SetException(Exception e) {
+ super(e);
+ }
+ }
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
new file mode 100644
index 0000000..fc26b5e
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.common.config;
+
+import java.net.MalformedURLException;
+import java.util.logging.Level;
+
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.util.StorageUtil;
+
+public class OptionTypes {
+
+ public static final IOptionType<Integer> INTEGER_BYTE_UNIT = new IOptionType<Integer>() {
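+ // Parses a human-readable byte size via StorageUtil.getByteValue and rejects values that do not fit in an int.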
+ @Override
+ public Integer parse(String s) {
+ long result = StorageUtil.getByteValue(s);
+ if (result > Integer.MAX_VALUE || result < Integer.MIN_VALUE) {
+ throw new IllegalArgumentException(
+ "The given value: " + result + " is not within the int range.");
+ }
+ return (int) result;
+ }
+
+ @Override
+ public Class<Integer> targetType() {
+ return Integer.class;
+ }
+ };
+
+ public static final IOptionType<Long> LONG_BYTE_UNIT = new IOptionType<Long>() {
+ @Override
+ public Long parse(String s) {
+ return StorageUtil.getByteValue(s);
+ }
+
+ @Override
+ public Class<Long> targetType() {
+ return Long.class;
+ }
+ };
+
+ public static final IOptionType<Integer> INTEGER = new IOptionType<Integer>() {
+ @Override
+ public Integer parse(String s) {
+ return Integer.parseInt(s);
+ }
+
+ @Override
+ public Class<Integer> targetType() {
+ return Integer.class;
+ }
+ };
+
+ public static final IOptionType<Double> DOUBLE = new IOptionType<Double>() {
+ @Override
+ public Double parse(String s) {
+ return Double.parseDouble(s);
+ }
+
+ @Override
+ public Class<Double> targetType() {
+ return Double.class;
+ }
+ };
+
+ public static final IOptionType<String> STRING = new IOptionType<String>() {
+ @Override
+ public String parse(String s) {
+ return s;
+ }
+
+ @Override
+ public Class<String> targetType() {
+ return String.class;
+ }
+ };
+
+ public static final IOptionType<Long> LONG = new IOptionType<Long>() {
+ @Override
+ public Long parse(String s) {
+ return Long.parseLong(s);
+ }
+
+ @Override
+ public Class<Long> targetType() {
+ return Long.class;
+ }
+ };
+
+ public static final IOptionType<Boolean> BOOLEAN = new IOptionType<Boolean>() {
+ @Override
+ public Boolean parse(String s) {
+ return Boolean.parseBoolean(s);
+ }
+
+ @Override
+ public Class<Boolean> targetType() {
+ return Boolean.class;
+ }
+ };
+
+ public static final IOptionType<Level> LEVEL = new IOptionType<Level>() {
+ @Override
+ public Level parse(String s) {
+ return Level.parse(s);
+ }
+
+ @Override
+ public Class<Level> targetType() {
+ return Level.class;
+ }
+
+ @Override
+ public Object serializeToJSON(Object value) {
+ return ((Level)value).getName();
+ }
+
+ @Override
+ public String serializeToIni(Object value) {
+ return ((Level)value).getName();
+ }
+ };
+
+ public static final IOptionType<String []> STRING_ARRAY = new IOptionType<String []>() {
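+ // Comma-separated list; surrounding whitespace is trimmed, e.g. "a, b,c" parses to ["a", "b", "c"].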
+ @Override
+ public String [] parse(String s) {
+ return s.split("\\s*,\\s*");
+ }
+
+ @Override
+ public Class<String []> targetType() {
+ return String [].class;
+ }
+
+ @Override
+ public String serializeToIni(Object value) {
+ return String.join(",", (String [])value);
+ }
+ };
+
+ public static final IOptionType<java.net.URL> URL = new IOptionType<java.net.URL>() {
+ @Override
+ public java.net.URL parse(String s) {
+ try {
+ return new java.net.URL(s);
+ } catch (MalformedURLException e) {
+ throw new IllegalArgumentException(e);
+ }
+ }
+
+ @Override
+ public Class<java.net.URL> targetType() {
+ return java.net.URL.class;
+ }
+ };
+
+ private OptionTypes() {
+ }
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/context/ServerContext.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/context/ServerContext.java
index d4dc054..ff037f0 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/context/ServerContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/context/ServerContext.java
@@ -29,7 +29,7 @@
private final ServerType type;
private final File baseDir;
- public ServerContext(ServerType type, File baseDir) throws Exception {
+ public ServerContext(ServerType type, File baseDir) {
this.type = type;
this.baseDir = baseDir;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
index b636a096..fbde58c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
@@ -18,156 +18,160 @@
*/
package org.apache.hyracks.control.common.controllers;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER;
+import static org.apache.hyracks.control.common.config.OptionTypes.LONG;
+import static org.apache.hyracks.control.common.config.OptionTypes.STRING;
+
import java.io.File;
-import java.io.IOException;
import java.net.InetAddress;
-import java.net.URL;
+import java.util.ArrayList;
import java.util.List;
+import java.util.function.Supplier;
-import org.apache.hyracks.api.application.IApplicationConfig;
-import org.apache.hyracks.control.common.application.IniApplicationConfig;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.control.common.config.ConfigManager;
+import org.apache.hyracks.util.file.FileUtil;
import org.ini4j.Ini;
-import org.kohsuke.args4j.Argument;
-import org.kohsuke.args4j.Option;
-import org.kohsuke.args4j.spi.StopOptionHandler;
-public class CCConfig {
- @Option(name = "-address", usage = "IP Address for CC (default: localhost)", required = false)
- public String ipAddress = InetAddress.getLoopbackAddress().getHostAddress();
+public class CCConfig extends ControllerConfig {
- @Option(name = "-client-net-ip-address",
- usage = "Sets the IP Address to listen for connections from clients (default: same as -address)",
- required = false)
- public String clientNetIpAddress;
+ public static String defaultDir = System.getProperty("java.io.tmpdir");
- @Option(name = "-client-net-port", usage = "Sets the port to listen for connections from clients (default 1098)")
- public int clientNetPort = 1098;
+ public enum Option implements IOption {
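+ // A constant's default may be a literal, another Option (e.g. CLUSTER_PUBLIC_ADDRESS falls back to
+ // CLUSTER_LISTEN_ADDRESS), or a Supplier evaluated lazily by ConfigManager.resolveDefault().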
+ APP_CLASS(STRING),
+ ADDRESS(STRING, InetAddress.getLoopbackAddress().getHostAddress()),
+ CLUSTER_LISTEN_ADDRESS(STRING, ADDRESS),
+ CLUSTER_LISTEN_PORT(INTEGER, 1099),
+ CLUSTER_PUBLIC_ADDRESS(STRING, CLUSTER_LISTEN_ADDRESS),
+ CLUSTER_PUBLIC_PORT(INTEGER, CLUSTER_LISTEN_PORT),
+ CLIENT_LISTEN_ADDRESS(STRING, ADDRESS),
+ CLIENT_LISTEN_PORT(INTEGER, 1098),
+ CONSOLE_LISTEN_ADDRESS(STRING, ADDRESS),
+ CONSOLE_LISTEN_PORT(INTEGER, 16001),
+ HEARTBEAT_PERIOD(INTEGER, 10000), // TODO (mblow): add time unit
+ HEARTBEAT_MAX_MISSES(INTEGER, 5),
+ PROFILE_DUMP_PERIOD(INTEGER, 0),
+ JOB_HISTORY_SIZE(INTEGER, 10),
+ RESULT_TTL(LONG, 86400000L), // TODO(mblow): add time unit
+ RESULT_SWEEP_THRESHOLD(LONG, 60000L), // TODO(mblow): add time unit
+ @SuppressWarnings("RedundantCast") // not redundant- false positive from IDEA
+ ROOT_DIR(STRING, (Supplier<String>)() -> FileUtil.joinPath(defaultDir, "ClusterControllerService")),
+ CLUSTER_TOPOLOGY(STRING),
+ JOB_QUEUE_CLASS(STRING, "org.apache.hyracks.control.cc.scheduler.FIFOJobQueue"),
+ JOB_MANAGER_CLASS(STRING, "org.apache.hyracks.control.cc.job.JobManager");
- // QQQ Note that clusterNetIpAddress is *not directly used* yet. Both
- // the cluster listener and the web server listen on "all interfaces".
- // This IP address is only used to instruct the NC on which IP to call in.
- @Option(name = "-cluster-net-ip-address",
- usage = "Sets the IP Address to listen for connections from NCs (default: same as -address)",
- required = false)
- public String clusterNetIpAddress;
+ private final IOptionType parser;
+ private final Object defaultValue;
- @Option(name = "-cluster-net-port",
- usage = "Sets the port to listen for connections from node controllers (default 1099)")
- public int clusterNetPort = 1099;
-
- @Option(name = "-http-port", usage = "Sets the http port for the Cluster Controller (default: 16001)")
- public int httpPort = 16001;
-
- @Option(name = "-heartbeat-period",
- usage = "Sets the time duration between two heartbeats from each node controller in milliseconds" +
- " (default: 10000)")
- public int heartbeatPeriod = 10000;
-
- @Option(name = "-max-heartbeat-lapse-periods",
- usage = "Sets the maximum number of missed heartbeats before a node is marked as dead (default: 5)")
- public int maxHeartbeatLapsePeriods = 5;
-
- @Option(name = "-profile-dump-period", usage = "Sets the time duration between two profile dumps from each node " +
- "controller in milliseconds. 0 to disable. (default: 0)")
- public int profileDumpPeriod = 0;
-
- @Option(name = "-default-max-job-attempts", usage = "Sets the default number of job attempts allowed if not " +
- "specified in the job specification. (default: 5)")
- public int defaultMaxJobAttempts = 5;
-
- @Option(name = "-job-history-size", usage = "Limits the number of historical jobs remembered by the system to " +
- "the specified value. (default: 10)")
- public int jobHistorySize = 10;
-
- @Option(name = "-result-time-to-live", usage = "Limits the amount of time results for asynchronous jobs should " +
- "be retained by the system in milliseconds. (default: 24 hours)")
- public long resultTTL = 86400000;
-
- @Option(name = "-result-sweep-threshold", usage = "The duration within which an instance of the result cleanup " +
- "should be invoked in milliseconds. (default: 1 minute)")
- public long resultSweepThreshold = 60000;
-
- @Option(name = "-cc-root",
- usage = "Sets the root folder used for file operations. (default: ClusterControllerService)")
- public String ccRoot = "ClusterControllerService";
-
- @Option(name = "-cluster-topology", required = false,
- usage = "Sets the XML file that defines the cluster topology. (default: null)")
- public File clusterTopologyDefinition = null;
-
- @Option(name = "-app-cc-main-class", required = false, usage = "Application CC Main Class")
- public String appCCMainClass = null;
-
- @Option(name = "-config-file",
- usage = "Specify path to master configuration file (default: none)", required = false)
- public String configFile = null;
-
- @Option(name = "-job-queue-class-name", usage = "Specify the implementation class name for the job queue. (default:"
- + " org.apache.hyracks.control.cc.scheduler.FIFOJobQueue)",
- required = false)
- public String jobQueueClassName = "org.apache.hyracks.control.cc.scheduler.FIFOJobQueue";
-
- @Option(name = "-job-manager-class-name", usage = "Specify the implementation class name for the job manager. "
- + "(default: org.apache.hyracks.control.cc.job.JobManager)", required = false)
- public String jobManagerClassName = "org.apache.hyracks.control.cc.job.JobManager";
-
- @Argument
- @Option(name = "--", handler = StopOptionHandler.class)
- public List<String> appArgs;
-
- public URL configFileUrl = null;
-
- private Ini ini = null;
-
- private void loadINIFile() throws IOException {
- // This method simply maps from the ini parameters to the CCConfig's fields.
- // It does not apply defaults or any logic.
- if (configFile != null) {
- ini = IniUtils.loadINIFile(configFile);
- } else if (configFileUrl != null) {
- ini = IniUtils.loadINIFile(configFileUrl);
- } else {
- return;
+ <T> Option(IOptionType<T> parser) {
+ this(parser, (T)null);
}
- ipAddress = IniUtils.getString(ini, "cc", "address", ipAddress);
- clientNetIpAddress = IniUtils.getString(ini, "cc", "client.address", clientNetIpAddress);
- clientNetPort = IniUtils.getInt(ini, "cc", "client.port", clientNetPort);
- clusterNetIpAddress = IniUtils.getString(ini, "cc", "cluster.address", clusterNetIpAddress);
- clusterNetPort = IniUtils.getInt(ini, "cc", "cluster.port", clusterNetPort);
- httpPort = IniUtils.getInt(ini, "cc", "http.port", httpPort);
- heartbeatPeriod = IniUtils.getInt(ini, "cc", "heartbeat.period", heartbeatPeriod);
- maxHeartbeatLapsePeriods = IniUtils.getInt(ini, "cc", "heartbeat.maxlapse", maxHeartbeatLapsePeriods);
- profileDumpPeriod = IniUtils.getInt(ini, "cc", "profiledump.period", profileDumpPeriod);
- defaultMaxJobAttempts = IniUtils.getInt(ini, "cc", "job.defaultattempts", defaultMaxJobAttempts);
- jobHistorySize = IniUtils.getInt(ini, "cc", "job.historysize", jobHistorySize);
- resultTTL = IniUtils.getLong(ini, "cc", "results.ttl", resultTTL);
- resultSweepThreshold = IniUtils.getLong(ini, "cc", "results.sweepthreshold", resultSweepThreshold);
- ccRoot = IniUtils.getString(ini, "cc", "rootfolder", ccRoot);
- // QQQ clusterTopologyDefinition is a "File"; should support verifying that the file
- // exists, as @Option likely does
- appCCMainClass = IniUtils.getString(ini, "cc", "app.class", appCCMainClass);
+ <T> Option(IOptionType<T> parser, Option defaultOption) {
+ this.parser = parser;
+ this.defaultValue = defaultOption;
+ }
+
+ <T> Option(IOptionType<T> parser, T defaultValue) {
+ this.parser = parser;
+ this.defaultValue = defaultValue;
+ }
+
+ <T> Option(IOptionType<T> parser, Supplier<T> defaultValue) {
+ this.parser = parser;
+ this.defaultValue = defaultValue;
+ }
+
+ @Override
+ public Section section() {
+ return Section.CC;
+ }
+
+ @Override
+ public IOptionType type() {
+ return parser;
+ }
+
+ @Override
+ public Object defaultValue() {
+ return defaultValue;
+ }
+
+ @Override
+ public String description() {
+ switch (this) {
+ case APP_CLASS:
+ return "Application CC main class";
+ case ADDRESS:
+ return "Default bind address for all services on this cluster controller";
+ case CLUSTER_LISTEN_ADDRESS:
+ return "Sets the IP Address to listen for connections from NCs";
+ case CLUSTER_LISTEN_PORT:
+ return "Sets the port to listen for connections from node controllers";
+ case CLUSTER_PUBLIC_ADDRESS:
+ return "Address that NCs should use to contact this CC";
+ case CLUSTER_PUBLIC_PORT:
+ return "Port that NCs should use to contact this CC";
+ case CLIENT_LISTEN_ADDRESS:
+ return "Sets the IP Address to listen for connections from clients";
+ case CLIENT_LISTEN_PORT:
+ return "Sets the port to listen for connections from clients";
+ case CONSOLE_LISTEN_ADDRESS:
+ return "Sets the listen address for the Cluster Controller";
+ case CONSOLE_LISTEN_PORT:
+ return "Sets the http port for the Cluster Controller)";
+ case HEARTBEAT_PERIOD:
+ return "Sets the time duration between two heartbeats from each node controller in milliseconds";
+ case HEARTBEAT_MAX_MISSES:
+ return "Sets the maximum number of missed heartbeats before a node is marked as dead";
+ case PROFILE_DUMP_PERIOD:
+ return "Sets the time duration between two profile dumps from each node controller in " +
+ "milliseconds; 0 to disable";
+ case JOB_HISTORY_SIZE:
+ return "Limits the number of historical jobs remembered by the system to the specified value";
+ case RESULT_TTL:
+ return "Limits the amount of time results for asynchronous jobs should be retained by the system " +
+ "in milliseconds";
+ case RESULT_SWEEP_THRESHOLD:
+ return "The duration within which an instance of the result cleanup should be invoked in " +
+ "milliseconds";
+ case ROOT_DIR:
+ return "Sets the root folder used for file operations";
+ case CLUSTER_TOPOLOGY:
+ return "Sets the XML file that defines the cluster topology";
+ case JOB_QUEUE_CLASS:
+ return "Specify the implementation class name for the job queue";
+ case JOB_MANAGER_CLASS:
+ return "Specify the implementation class name for the job manager";
+ default:
+ throw new IllegalStateException("NYI: " + this);
+ }
+ }
}
- /**
- * Once all @Option fields have been loaded from command-line or otherwise
- * specified programmatically, call this method to:
- * 1. Load options from a config file (as specified by -config-file)
- * 2. Set default values for certain derived values, such as setting
- * clusterNetIpAddress to ipAddress
- */
- public void loadConfigAndApplyDefaults() throws IOException {
- loadINIFile();
- if (ini != null) {
- // QQQ This way of passing overridden/defaulted values back into
- // the ini feels clunky, and it's clearly incomplete
- ini.add("cc", "cluster.address", clusterNetIpAddress);
- ini.add("cc", "client.address", clientNetIpAddress);
- }
+ private final ConfigManager configManager;
- // "address" is the default for all IP addresses
- clusterNetIpAddress = clusterNetIpAddress == null ? ipAddress : clusterNetIpAddress;
- clientNetIpAddress = clientNetIpAddress == null ? ipAddress : clientNetIpAddress;
+ private List<String> appArgs = new ArrayList<>();
+
+ public CCConfig() {
+ this(new ConfigManager());
+ }
+
+ public CCConfig(ConfigManager configManager) {
+ super(configManager);
+ this.configManager = configManager;
+ configManager.register(Option.class);
+ configManager.registerArgsListener(appArgs::addAll);
+ }
+
+ public List<String> getAppArgs() {
+ return appArgs;
+ }
+
+ public String[] getAppArgsArray() {
+ return appArgs.toArray(new String[appArgs.size()]);
}
/**
@@ -175,15 +179,158 @@
* if -config-file wasn't specified.
*/
public Ini getIni() {
- return ini;
+ return configManager.toIni(false);
}
- /**
- * @return An IApplicationConfig representing this NCConfig.
- * Note: Currently this only includes the values from the configuration
- * file, not anything specified on the command-line. QQQ
- */
- public IApplicationConfig getAppConfig() {
- return new IniApplicationConfig(ini);
+ public ConfigManager getConfigManager() {
+ return configManager;
+ }
+
+ // QQQ Note that clusterListenAddress is *not directly used* yet. Both
+ // the cluster listener and the web server listen on "all interfaces".
+ // This IP address is only used to instruct the NC on which IP to call in.
+ public String getClusterListenAddress() {
+ return getAppConfig().getString(Option.CLUSTER_LISTEN_ADDRESS);
+ }
+
+ public void setClusterListenAddress(String clusterListenAddress) {
+ configManager.set(Option.CLUSTER_LISTEN_ADDRESS, clusterListenAddress);
+ }
+
+ public int getClusterListenPort() {
+ return getAppConfig().getInt(Option.CLUSTER_LISTEN_PORT);
+ }
+
+ public void setClusterListenPort(int clusterListenPort) {
+ configManager.set(Option.CLUSTER_LISTEN_PORT, clusterListenPort);
+ }
+
+ public String getClusterPublicAddress() {
+ return getAppConfig().getString(Option.CLUSTER_PUBLIC_ADDRESS);
+ }
+
+ public void setClusterPublicAddress(String clusterPublicAddress) {
+ configManager.set(Option.CLUSTER_PUBLIC_ADDRESS, clusterPublicAddress);
+ }
+
+ public int getClusterPublicPort() {
+ return getAppConfig().getInt(Option.CLUSTER_PUBLIC_PORT);
+ }
+
+ public void setClusterPublicPort(int clusterPublicPort) {
+ configManager.set(Option.CLUSTER_PUBLIC_PORT, clusterPublicPort);
+ }
+
+ public String getClientListenAddress() {
+ return getAppConfig().getString(Option.CLIENT_LISTEN_ADDRESS);
+ }
+
+ public void setClientListenAddress(String clientListenAddress) {
+ configManager.set(Option.CLIENT_LISTEN_ADDRESS, clientListenAddress);
+ }
+
+ public int getClientListenPort() {
+ return getAppConfig().getInt(Option.CLIENT_LISTEN_PORT);
+ }
+
+ public void setClientListenPort(int clientListenPort) {
+ configManager.set(Option.CLIENT_LISTEN_PORT, clientListenPort);
+ }
+
+ public int getConsoleListenPort() {
+ return getAppConfig().getInt(Option.CONSOLE_LISTEN_PORT);
+ }
+
+ public void setConsoleListenPort(int consoleListenPort) {
+ configManager.set(Option.CONSOLE_LISTEN_PORT, consoleListenPort);
+ }
+
+ public int getHeartbeatPeriod() {
+ return getAppConfig().getInt(Option.HEARTBEAT_PERIOD);
+ }
+
+ public void setHeartbeatPeriod(int heartbeatPeriod) {
+ configManager.set(Option.HEARTBEAT_PERIOD, heartbeatPeriod);
+ }
+
+ public int getHeartbeatMaxMisses() {
+ return getAppConfig().getInt(Option.HEARTBEAT_MAX_MISSES);
+ }
+
+ public void setHeartbeatMaxMisses(int heartbeatMaxMisses) {
+ configManager.set(Option.HEARTBEAT_MAX_MISSES, heartbeatMaxMisses);
+ }
+
+ public int getProfileDumpPeriod() {
+ return getAppConfig().getInt(Option.PROFILE_DUMP_PERIOD);
+ }
+
+ public void setProfileDumpPeriod(int profileDumpPeriod) {
+ configManager.set(Option.PROFILE_DUMP_PERIOD, profileDumpPeriod);
+ }
+
+ public int getJobHistorySize() {
+ return getAppConfig().getInt(Option.JOB_HISTORY_SIZE);
+ }
+
+ public void setJobHistorySize(int jobHistorySize) {
+ configManager.set(Option.JOB_HISTORY_SIZE, jobHistorySize);
+ }
+
+ public long getResultTTL() {
+ return getAppConfig().getLong(Option.RESULT_TTL);
+ }
+
+ public void setResultTTL(long resultTTL) {
+ configManager.set(Option.RESULT_TTL, resultTTL);
+ }
+
+ public long getResultSweepThreshold() {
+ return getAppConfig().getLong(Option.RESULT_SWEEP_THRESHOLD);
+ }
+
+ public void setResultSweepThreshold(long resultSweepThreshold) {
+ configManager.set(Option.RESULT_SWEEP_THRESHOLD, resultSweepThreshold);
+ }
+
+ public String getRootDir() {
+ return getAppConfig().getString(Option.ROOT_DIR);
+ }
+
+ public void setRootDir(String rootDir) {
+ configManager.set(Option.ROOT_DIR, rootDir);
+ }
+
+ public File getClusterTopology() {
+ return getAppConfig().getString(Option.CLUSTER_TOPOLOGY) == null ? null
+ : new File(getAppConfig().getString(Option.CLUSTER_TOPOLOGY));
+ }
+
+ public void setClusterTopology(File clusterTopology) {
+ configManager.set(Option.CLUSTER_TOPOLOGY, clusterTopology);
+ }
+
+ public String getAppClass() {
+ return getAppConfig().getString(Option.APP_CLASS);
+ }
+
+ public void setAppClass(String appClass) {
+ configManager.set(Option.APP_CLASS, appClass);
+ }
+
+ public String getJobQueueClass() {
+ return getAppConfig().getString(Option.JOB_QUEUE_CLASS);
+ }
+
+ public void setJobQueueClass(String jobQueueClass) {
+ configManager.set(Option.JOB_QUEUE_CLASS, jobQueueClass);
+ }
+
+ public String getJobManagerClass() {
+ return getAppConfig().getString(Option.JOB_MANAGER_CLASS);
+ }
+
+ public void setJobManagerClass(String jobManagerClass) {
+ configManager.set(Option.JOB_MANAGER_CLASS, jobManagerClass);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ControllerConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ControllerConfig.java
new file mode 100644
index 0000000..4aae6df
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ControllerConfig.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.common.controllers;
+
+import java.io.Serializable;
+import java.net.URL;
+
+import org.apache.hyracks.api.config.IApplicationConfig;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.control.common.config.ConfigManager;
+import org.apache.hyracks.control.common.config.OptionTypes;
+
+public class ControllerConfig implements Serializable {
+
+ public enum Option implements IOption {
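+ // Options shared by both controllers (CC and NC); they live in the [common] section.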
+ CONFIG_FILE(OptionTypes.STRING, "Specify path to master configuration file"),
+ CONFIG_FILE_URL(OptionTypes.URL, "Specify URL to master configuration file");
+
+ private final IOptionType type;
+ private final String description;
+
+
+ Option(IOptionType type, String description) {
+ this.type = type;
+ this.description = description;
+ }
+
+ @Override
+ public Section section() {
+ return Section.COMMON;
+ }
+
+ @Override
+ public String description() {
+ return description;
+ }
+
+ @Override
+ public IOptionType type() {
+ return type;
+ }
+
+ @Override
+ public Object defaultValue() {
+ return null;
+ }
+ }
+
+ protected final ConfigManager configManager;
+
+ protected ControllerConfig(ConfigManager configManager) {
+ this.configManager = configManager;
+ }
+
+ public IApplicationConfig getAppConfig() {
+ return configManager.getAppConfig();
+ }
+
+ public String getConfigFile() {
+ return getAppConfig().getString(ControllerConfig.Option.CONFIG_FILE);
+ }
+
+ public void setConfigFile(String configFile) {
+ configManager.set(ControllerConfig.Option.CONFIG_FILE, configFile);
+ }
+
+ public URL getConfigFileUrl() {
+ return (URL) getAppConfig().get(ControllerConfig.Option.CONFIG_FILE_URL);
+ }
+
+ public void setConfigFileUrl(URL configFileUrl) {
+ configManager.set(ControllerConfig.Option.CONFIG_FILE_URL, configFileUrl);
+ }
+}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/IniUtils.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/IniUtils.java
deleted file mode 100644
index 451421d..0000000
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/IniUtils.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hyracks.control.common.controllers;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.lang.reflect.Array;
-import java.net.URL;
-
-import org.ini4j.Ini;
-import org.ini4j.Profile.Section;
-
-/**
- * Some utility functions for reading Ini4j objects with default values.
- * For all getXxx() methods: if the 'section' contains a slash, and the 'key'
- * is not found in that section, we will search for the key in the section named
- * by stripping the leaf of the section name (final slash and anything following).
- * eg. getInt(ini, "nc/red", "dir", null) will first look for the key "dir" in
- * the section "nc/red", but if it is not found, will look in the section "nc".
- */
-public class IniUtils {
-
- private IniUtils() {
- }
-
- private static <T> T getIniValue(Ini ini, String section, String key, T defaultValue, Class<T> clazz) {
- T value;
- while (true) {
- value = ini.get(section, key, clazz);
- if (value == null) {
- int idx = section.lastIndexOf('/');
- if (idx > -1) {
- section = section.substring(0, idx);
- continue;
- }
- }
- break;
- }
- return (value != null) ? value : defaultValue;
- }
-
- @SuppressWarnings("unchecked")
- private static <T> T getIniArray(Ini ini, String section, String key, Class<T> clazz) {
- Section sec = ini.get(section);
- if (clazz.getComponentType() == null) {
- return null;
- }
- if (sec == null) {
- return (T) Array.newInstance(clazz.getComponentType(), 0);
- } else {
- return sec.getAll(key, clazz);
- }
- }
-
- public static String getString(Ini ini, String section, String key, String defaultValue) {
- return getIniValue(ini, section, key, defaultValue, String.class);
- }
-
- public static String[] getStringArray(Ini ini, String section, String key) {
- return getIniArray(ini, section, key, String[].class);
- }
-
- public static int getInt(Ini ini, String section, String key, int defaultValue) {
- return getIniValue(ini, section, key, defaultValue, Integer.class);
- }
-
- public static long getLong(Ini ini, String section, String key, long defaultValue) {
- return getIniValue(ini, section, key, defaultValue, Long.class);
- }
-
- public static Ini loadINIFile(String configFile) throws IOException {
- Ini ini = new Ini();
- File conffile = new File(configFile);
- if (!conffile.exists()) {
- throw new FileNotFoundException(configFile);
- }
- ini.load(conffile);
- return ini;
- }
-
- public static Ini loadINIFile(URL configURL) throws IOException {
- Ini ini = new Ini();
- ini.load(configURL);
- return ini;
- }
-}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
index fa7d76a..7906b52 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
@@ -18,287 +18,485 @@
*/
package org.apache.hyracks.control.common.controllers;
-import java.io.IOException;
-import java.io.Serializable;
+import static org.apache.hyracks.control.common.config.OptionTypes.BOOLEAN;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER;
+import static org.apache.hyracks.control.common.config.OptionTypes.INTEGER_BYTE_UNIT;
+import static org.apache.hyracks.control.common.config.OptionTypes.LONG;
+import static org.apache.hyracks.control.common.config.OptionTypes.STRING;
+import static org.apache.hyracks.control.common.config.OptionTypes.STRING_ARRAY;
+
import java.net.InetAddress;
-import java.net.URL;
+import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
+import java.util.function.Supplier;
-import org.apache.hyracks.api.application.IApplicationConfig;
-import org.apache.hyracks.control.common.application.IniApplicationConfig;
-import org.ini4j.Ini;
-import org.kohsuke.args4j.Argument;
-import org.kohsuke.args4j.Option;
-import org.kohsuke.args4j.spi.StopOptionHandler;
+import org.apache.hyracks.api.config.IApplicationConfig;
+import org.apache.hyracks.api.config.IOption;
+import org.apache.hyracks.api.config.IOptionType;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.control.common.config.ConfigManager;
+import org.apache.hyracks.util.file.FileUtil;
-public class NCConfig implements Serializable {
- private static final long serialVersionUID = 2L;
+public class NCConfig extends ControllerConfig {
+ private static final long serialVersionUID = 3L;
- @Option(name = "-cc-host", usage = "Cluster Controller host name (required unless specified in config file)",
- required = false)
- public String ccHost = null;
+ public static String defaultDir = System.getProperty("java.io.tmpdir");
+ public static String defaultAppClass = null;
- @Option(name = "-cc-port", usage = "Cluster Controller port (default: 1099)", required = false)
- public int ccPort = 1099;
+ public enum Option implements IOption {
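+ // Listen addresses default to ADDRESS and public addresses to PUBLIC_ADDRESS (which itself defaults
+ // to ADDRESS), so setting a single address configures every listener on this NC.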
+ ADDRESS(STRING, InetAddress.getLoopbackAddress().getHostAddress()),
+ PUBLIC_ADDRESS(STRING, ADDRESS),
+ CLUSTER_LISTEN_ADDRESS(STRING, ADDRESS),
+ CLUSTER_LISTEN_PORT(INTEGER, 0),
+ NCSERVICE_ADDRESS(STRING, PUBLIC_ADDRESS),
+ NCSERVICE_PORT(INTEGER, 9090),
+ CLUSTER_ADDRESS(STRING, (String)null),
+ CLUSTER_PORT(INTEGER, 1099),
+ CLUSTER_PUBLIC_ADDRESS(STRING, PUBLIC_ADDRESS),
+ CLUSTER_PUBLIC_PORT(INTEGER, CLUSTER_LISTEN_PORT),
+ NODE_ID(STRING, (String)null),
+ DATA_LISTEN_ADDRESS(STRING, ADDRESS),
+ DATA_LISTEN_PORT(INTEGER, 0),
+ DATA_PUBLIC_ADDRESS(STRING, PUBLIC_ADDRESS),
+ DATA_PUBLIC_PORT(INTEGER, DATA_LISTEN_PORT),
+ RESULT_LISTEN_ADDRESS(STRING, ADDRESS),
+ RESULT_LISTEN_PORT(INTEGER, 0),
+ RESULT_PUBLIC_ADDRESS(STRING, PUBLIC_ADDRESS),
+ RESULT_PUBLIC_PORT(INTEGER, RESULT_LISTEN_PORT),
+ MESSAGING_LISTEN_ADDRESS(STRING, ADDRESS),
+ MESSAGING_LISTEN_PORT(INTEGER, 0),
+ MESSAGING_PUBLIC_ADDRESS(STRING, PUBLIC_ADDRESS),
+ MESSAGING_PUBLIC_PORT(INTEGER, MESSAGING_LISTEN_PORT),
+ CLUSTER_CONNECT_RETRIES(INTEGER, 5),
+ @SuppressWarnings("RedundantCast") // not redundant- false positive from IDEA
+ IODEVICES(STRING_ARRAY, (Supplier<String []>)() -> new String [] { FileUtil.joinPath(defaultDir, "iodevice") }),
+ NET_THREAD_COUNT(INTEGER, 1),
+ NET_BUFFER_COUNT(INTEGER, 1),
+ RESULT_TTL(LONG, 86400000L),
+ RESULT_SWEEP_THRESHOLD(LONG, 60000L),
+ RESULT_MANAGER_MEMORY(INTEGER_BYTE_UNIT, -1),
+ @SuppressWarnings("RedundantCast") // not redundant- false positive from IDEA
+ APP_CLASS(STRING, (Supplier<String>)() -> defaultAppClass),
+ NCSERVICE_PID(INTEGER, -1),
+ COMMAND(STRING, "hyracksnc"),
+ JVM_ARGS(STRING, (String)null),
+ VIRTUAL_NC(BOOLEAN, false);
- @Option(name = "-address", usage = "IP Address for NC (default: localhost)", required = false)
- public String ipAddress = InetAddress.getLoopbackAddress().getHostAddress();
+ private final IOptionType parser;
+ private final Object defaultValue;
- @Option(name = "-cluster-net-ip-address", usage = "IP Address to bind cluster listener (default: same as -address)",
- required = false)
- public String clusterNetIPAddress;
-
- @Option(name = "-cluster-net-port", usage = "IP port to bind cluster listener (default: random port)",
- required = false)
- public int clusterNetPort = 0;
-
- @Option(name = "-cluster-net-public-ip-address",
- usage = "Public IP Address to announce cluster listener (default: same as -cluster-net-ip-address)",
- required = false)
- public String clusterNetPublicIPAddress;
-
- @Option(name = "-cluster-net-public-port",
- usage = "Public IP port to announce cluster listener (default: same as -cluster-net-port; " +
- "must set -cluster-net-public-ip-address also)", required = false)
- public int clusterNetPublicPort = 0;
-
- @Option(name = "-node-id", usage = "Logical name of node controller unique within the cluster (required unless " +
- "specified in config file)", required = false)
- public String nodeId = null;
-
- @Option(name = "-data-ip-address", usage = "IP Address to bind data listener (default: same as -address)",
- required = false)
- public String dataIPAddress;
-
- @Option(name = "-data-port", usage = "IP port to bind data listener (default: random port)", required = false)
- public int dataPort = 0;
-
- @Option(name = "-data-public-ip-address",
- usage = "Public IP Address to announce data listener (default: same as -data-ip-address)", required = false)
- public String dataPublicIPAddress;
-
- @Option(name = "-data-public-port",
- usage = "Public IP port to announce data listener (default: same as -data-port; must set " +
- "-data-public-ip-address also)", required = false)
- public int dataPublicPort = 0;
-
- @Option(name = "-result-ip-address",
- usage = "IP Address to bind dataset result distribution listener (default: same as -address)",
- required = false)
- public String resultIPAddress;
-
- @Option(name = "-result-port",
- usage = "IP port to bind dataset result distribution listener (default: random port)",
- required = false)
- public int resultPort = 0;
-
- @Option(name = "-result-public-ip-address",
- usage = "Public IP Address to announce dataset result distribution listener (default: same as " +
- "-result-ip-address)", required = false)
- public String resultPublicIPAddress;
-
- @Option(name = "-result-public-port", usage = "Public IP port to announce dataset result distribution listener " +
- "(default: same as -result-port; must set -result-public-ip-address also)", required = false)
- public int resultPublicPort = 0;
-
- @Option(name = "-retries", usage = "Number of attempts to contact CC before giving up (default: 5)")
- public int retries = 5;
-
- @Option(name = "-iodevices",
- usage = "Comma separated list of IO Device mount points (default: One device in default temp folder)",
- required = false)
- public String ioDevices = System.getProperty("java.io.tmpdir");
-
- @Option(name = "-net-thread-count", usage = "Number of threads to use for Network I/O (default: 1)")
- public int nNetThreads = 1;
-
- @Option(name = "-net-buffer-count", usage = "Number of network buffers per input/output channel (default: 1)",
- required = false)
- public int nNetBuffers = 1;
-
- @Option(name = "-max-memory", usage = "Maximum memory usable at this Node Controller in bytes (default: -1 auto)")
- public int maxMemory = -1;
-
- @Option(name = "-result-time-to-live", usage = "Limits the amount of time results for asynchronous jobs should " +
- "be retained by the system in milliseconds. (default: 24 hours)")
- public long resultTTL = 86400000;
-
- @Option(name = "-result-sweep-threshold", usage = "The duration within which an instance of the result cleanup " +
- "should be invoked in milliseconds. (default: 1 minute)")
- public long resultSweepThreshold = 60000;
-
- @Option(name = "-result-manager-memory",
- usage = "Memory usable for result caching at this Node Controller in bytes (default: -1 auto)")
- public int resultManagerMemory = -1;
-
- @Option(name = "-app-nc-main-class", usage = "Application NC Main Class")
- public String appNCMainClass;
-
- @Option(name = "-config-file", usage = "Specify path to local configuration file (default: no local config)",
- required = false)
- public String configFile = null;
-
- @Option(name = "-messaging-ip-address", usage = "IP Address to bind messaging "
- + "listener (default: same as -address)", required = false)
- public String messagingIPAddress;
-
- @Option(name = "-messaging-port", usage = "IP port to bind messaging listener "
- + "(default: random port)", required = false)
- public int messagingPort = 0;
-
- @Option(name = "-messaging-public-ip-address", usage = "Public IP Address to announce messaging"
- + " listener (default: same as -messaging-ip-address)", required = false)
- public String messagingPublicIPAddress;
-
- @Option(name = "-messaging-public-port", usage = "Public IP port to announce messaging listener"
- + " (default: same as -messaging-port; must set -messaging-public-port also)", required = false)
- public int messagingPublicPort = 0;
-
- @Option(name = "-ncservice-pid", usage = "PID of the NCService which launched this NCDriver", required = false)
- public int ncservicePid = -1;
-
- @Argument
- @Option(name = "--", handler = StopOptionHandler.class)
- public List<String> appArgs;
-
- public URL configFileUrl = null;
-
- private transient Ini ini = null;
-
- private void loadINIFile() throws IOException {
- if (configFile != null) {
- ini = IniUtils.loadINIFile(configFile);
- } else if (configFileUrl != null) {
- ini = IniUtils.loadINIFile(configFileUrl);
- } else {
- return;
+ <T> Option(IOptionType<T> parser, Option defaultOption) {
+ this.parser = parser;
+ this.defaultValue = defaultOption;
}
- // QQQ This should default to cc/address if cluster.address not set, but
- // that logic really should be handled by the ini file sent from the CC
- ccHost = IniUtils.getString(ini, "cc", "cluster.address", ccHost);
- ccPort = IniUtils.getInt(ini, "cc", "cluster.port", ccPort);
-
- // Get ID of *this* NC
- nodeId = IniUtils.getString(ini, "localnc", "id", nodeId);
- String nodeSection = "nc/" + nodeId;
-
- // Network ports
- ipAddress = IniUtils.getString(ini, nodeSection, "address", ipAddress);
-
- clusterNetIPAddress = IniUtils.getString(ini, nodeSection, "cluster.address", clusterNetIPAddress);
- clusterNetPort = IniUtils.getInt(ini, nodeSection, "cluster.port", clusterNetPort);
- dataIPAddress = IniUtils.getString(ini, nodeSection, "data.address", dataIPAddress);
- dataPort = IniUtils.getInt(ini, nodeSection, "data.port", dataPort);
- resultIPAddress = IniUtils.getString(ini, nodeSection, "result.address", resultIPAddress);
- resultPort = IniUtils.getInt(ini, nodeSection, "result.port", resultPort);
-
- clusterNetPublicIPAddress = IniUtils.getString(ini, nodeSection, "public.cluster.address",
- clusterNetPublicIPAddress);
- clusterNetPublicPort = IniUtils.getInt(ini, nodeSection, "public.cluster.port", clusterNetPublicPort);
- dataPublicIPAddress = IniUtils.getString(ini, nodeSection, "public.data.address", dataPublicIPAddress);
- dataPublicPort = IniUtils.getInt(ini, nodeSection, "public.data.port", dataPublicPort);
- resultPublicIPAddress = IniUtils.getString(ini, nodeSection, "public.result.address", resultPublicIPAddress);
- resultPublicPort = IniUtils.getInt(ini, nodeSection, "public.result.port", resultPublicPort);
-
- messagingIPAddress = IniUtils.getString(ini, nodeSection, "messaging.address", messagingIPAddress);
- messagingPort = IniUtils.getInt(ini, nodeSection, "messaging.port", messagingPort);
- messagingPublicIPAddress = IniUtils.getString(ini, nodeSection, "public.messaging.address",
- messagingPublicIPAddress);
- messagingPublicPort = IniUtils.getInt(ini, nodeSection, "public.messaging.port", messagingPublicPort);
-
- retries = IniUtils.getInt(ini, nodeSection, "retries", retries);
-
- // Directories
- ioDevices = IniUtils.getString(ini, nodeSection, "iodevices", ioDevices);
-
- // Hyracks client entrypoint
- appNCMainClass = IniUtils.getString(ini, nodeSection, "app.class", appNCMainClass);
- }
-
- /*
- * Once all @Option fields have been loaded from command-line or otherwise
- * specified programmatically, call this method to:
- * 1. Load options from a config file (as specified by -config-file)
- * 2. Set default values for certain derived values, such as setting
- * clusterNetIpAddress to ipAddress
- */
- public void loadConfigAndApplyDefaults() throws IOException {
- loadINIFile();
-
- // "address" is the default for all IP addresses
- if (clusterNetIPAddress == null) {
- clusterNetIPAddress = ipAddress;
- }
- if (dataIPAddress == null) {
- dataIPAddress = ipAddress;
- }
- if (resultIPAddress == null) {
- resultIPAddress = ipAddress;
+ <T> Option(IOptionType<T> parser, T defaultValue) {
+ this.parser = parser;
+ this.defaultValue = defaultValue;
}
- // All "public" options default to their "non-public" versions
- if (clusterNetPublicIPAddress == null) {
- clusterNetPublicIPAddress = clusterNetIPAddress;
+ <T> Option(IOptionType<T> parser, Supplier<T> defaultValue) {
+ this.parser = parser;
+ this.defaultValue = defaultValue;
}
- if (clusterNetPublicPort == 0) {
- clusterNetPublicPort = clusterNetPort;
+
+ @Override
+ public Section section() {
+ switch (this) {
+ case NODE_ID:
+ return Section.LOCALNC;
+ default:
+ return Section.NC;
+ }
}
- if (dataPublicIPAddress == null) {
- dataPublicIPAddress = dataIPAddress;
+
+ @Override
+ public String description() {
+ switch (this) {
+ case ADDRESS:
+ return "Default IP Address to bind listeners on this NC. All services will bind on this address " +
+ "unless a service-specific listen address is supplied.";
+ case CLUSTER_LISTEN_ADDRESS:
+ return "IP Address to bind cluster listener on this NC";
+ case PUBLIC_ADDRESS:
+ return "Default public address that other processes should use to contact this NC. All services " +
+ "will advertise this address unless a service-specific public address is supplied.";
+ case NCSERVICE_ADDRESS:
+ return "Address the CC should use to contact the NCService associated with this NC";
+ case NCSERVICE_PORT:
+ return "Port the CC should use to contact the NCService associated with this NC";
+ case CLUSTER_ADDRESS:
+ return "Cluster Controller address (required unless specified in config file)";
+ case CLUSTER_PORT:
+ return "Cluster Controller port";
+ case CLUSTER_LISTEN_PORT:
+ return "IP port to bind cluster listener";
+ case CLUSTER_PUBLIC_ADDRESS:
+ return "Public IP Address to announce cluster listener";
+ case CLUSTER_PUBLIC_PORT:
+ return "Public IP port to announce cluster listener";
+ case NODE_ID:
+ return "Logical name of node controller unique within the cluster (required unless specified in " +
+ "config file)";
+ case DATA_LISTEN_ADDRESS:
+ return "IP Address to bind data listener";
+ case DATA_LISTEN_PORT:
+ return "IP port to bind data listener";
+ case DATA_PUBLIC_ADDRESS:
+ return "Public IP Address to announce data listener";
+ case DATA_PUBLIC_PORT:
+ return "Public IP port to announce data listener";
+ case RESULT_LISTEN_ADDRESS:
+ return "IP Address to bind dataset result distribution listener";
+ case RESULT_LISTEN_PORT:
+ return "IP port to bind dataset result distribution listener";
+ case RESULT_PUBLIC_ADDRESS:
+ return "Public IP Address to announce dataset result distribution listener";
+ case RESULT_PUBLIC_PORT:
+ return "Public IP port to announce dataset result distribution listener";
+ case MESSAGING_LISTEN_ADDRESS:
+ return "IP Address to bind messaging listener";
+ case MESSAGING_LISTEN_PORT:
+ return "IP port to bind messaging listener";
+ case MESSAGING_PUBLIC_ADDRESS:
+ return "Public IP Address to announce messaging listener";
+ case MESSAGING_PUBLIC_PORT:
+ return "Public IP port to announce messaging listener";
+ case CLUSTER_CONNECT_RETRIES:
+ return "Number of attempts to contact CC before giving up";
+ case IODEVICES:
+ return "Comma separated list of IO Device mount points";
+ case NET_THREAD_COUNT:
+ return "Number of threads to use for Network I/O";
+ case NET_BUFFER_COUNT:
+ return "Number of network buffers per input/output channel";
+ case RESULT_TTL:
+ return "Limits the amount of time results for asynchronous jobs should be retained by the system " +
+ "in milliseconds";
+ case RESULT_SWEEP_THRESHOLD:
+ return "The duration within which an instance of the result cleanup should be invoked in " +
+ "milliseconds";
+ case RESULT_MANAGER_MEMORY:
+ return "Memory usable for result caching at this Node Controller in bytes";
+ case APP_CLASS:
+ return "Application NC Main Class";
+ case NCSERVICE_PID:
+ return "PID of the NCService which launched this NCDriver";
+ case COMMAND:
+ return "Command NCService should invoke to start the NCDriver";
+ case JVM_ARGS:
+ return "JVM args to pass to the NCDriver";
+ case VIRTUAL_NC:
+ return "A flag indicating if this NC is running on virtual cluster";
+ default:
+ throw new IllegalStateException("NYI: " + this);
+ }
}
- if (dataPublicPort == 0) {
- dataPublicPort = dataPort;
+
+
+ @Override
+ public IOptionType type() {
+ return parser;
}
- if (resultPublicIPAddress == null) {
- resultPublicIPAddress = resultIPAddress;
+
+ @Override
+ public Object defaultValue() {
+ return defaultValue;
}
- if (resultPublicPort == 0) {
- resultPublicPort = resultPort;
+
+ @Override
+ public boolean hidden() {
+ return this == VIRTUAL_NC;
}
}
- /**
- * @return An IApplicationConfig representing this NCConfig.
- * Note: Currently this only includes the values from the configuration
- * file, not anything specified on the command-line. QQQ
- */
+ private List<String> appArgs = new ArrayList<>();
+
+ private final IApplicationConfig appConfig;
+ private final String nodeId;
+
+ public NCConfig(String nodeId) {
+ this(nodeId, new ConfigManager(null));
+ }
+
+ public NCConfig(String nodeId, ConfigManager configManager) {
+ super(configManager);
+ this.appConfig = configManager.getNodeEffectiveConfig(nodeId);
+ configManager.register(Option.class);
+ setNodeId(nodeId);
+ this.nodeId = nodeId;
+ configManager.registerArgsListener(appArgs::addAll);
+ }
+
+ public List<String> getAppArgs() {
+ return appArgs;
+ }
+
+ public String[] getAppArgsArray() {
+ return appArgs.toArray(new String[appArgs.size()]);
+ }
+
+ public ConfigManager getConfigManager() {
+ return configManager;
+ }
+
public IApplicationConfig getAppConfig() {
- return new IniApplicationConfig(ini);
+ return appConfig;
}
- public void toMap(Map<String, String> configuration) {
- configuration.put("cc-host", ccHost);
- configuration.put("cc-port", (String.valueOf(ccPort)));
- configuration.put("cluster-net-ip-address", clusterNetIPAddress);
- configuration.put("cluster-net-port", String.valueOf(clusterNetPort));
- configuration.put("cluster-net-public-ip-address", clusterNetPublicIPAddress);
- configuration.put("cluster-net-public-port", String.valueOf(clusterNetPublicPort));
- configuration.put("node-id", nodeId);
- configuration.put("data-ip-address", dataIPAddress);
- configuration.put("data-port", String.valueOf(dataPort));
- configuration.put("data-public-ip-address", dataPublicIPAddress);
- configuration.put("data-public-port", String.valueOf(dataPublicPort));
- configuration.put("result-ip-address", resultIPAddress);
- configuration.put("result-port", String.valueOf(resultPort));
- configuration.put("result-public-ip-address", resultPublicIPAddress);
- configuration.put("result-public-port", String.valueOf(resultPublicPort));
- configuration.put("retries", String.valueOf(retries));
- configuration.put("iodevices", ioDevices);
- configuration.put("net-thread-count", String.valueOf(nNetThreads));
- configuration.put("net-buffer-count", String.valueOf(nNetBuffers));
- configuration.put("max-memory", String.valueOf(maxMemory));
- configuration.put("result-time-to-live", String.valueOf(resultTTL));
- configuration.put("result-sweep-threshold", String.valueOf(resultSweepThreshold));
- configuration.put("result-manager-memory", String.valueOf(resultManagerMemory));
- configuration.put("messaging-ip-address", messagingIPAddress);
- configuration.put("messaging-port", String.valueOf(messagingPort));
- configuration.put("messaging-public-ip-address", messagingPublicIPAddress);
- configuration.put("messaging-public-port", String.valueOf(messagingPublicPort));
- configuration.put("ncservice-pid", String.valueOf(ncservicePid));
- if (appNCMainClass != null) {
- configuration.put("app-nc-main-class", appNCMainClass);
- }
+ public String getPublicAddress() {
+ return appConfig.getString(Option.PUBLIC_ADDRESS);
+ }
+
+ public void setPublicAddress(String publicAddress) {
+ configManager.set(nodeId, Option.PUBLIC_ADDRESS, publicAddress);
+ }
+
+ public String getNCServiceAddress() {
+ return appConfig.getString(Option.NCSERVICE_ADDRESS);
+ }
+
+ public void setNCServiceAddress(String ncserviceAddress) {
+ configManager.set(nodeId, Option.NCSERVICE_ADDRESS, ncserviceAddress);
+ }
+
+ public int getNCServicePort() {
+ return appConfig.getInt(Option.NCSERVICE_PORT);
+ }
+
+ public void setNCServicePort(int ncservicePort) {
+ configManager.set(nodeId, Option.NCSERVICE_PORT, ncservicePort);
+ }
+
+ public String getClusterAddress() {
+ return appConfig.getString(Option.CLUSTER_ADDRESS);
+ }
+
+ public void setClusterAddress(String clusterAddress) {
+ configManager.set(nodeId, Option.CLUSTER_ADDRESS, clusterAddress);
+ }
+
+ public int getClusterPort() {
+ return appConfig.getInt(Option.CLUSTER_PORT);
+ }
+
+ public void setClusterPort(int clusterPort) {
+ configManager.set(nodeId, Option.CLUSTER_PORT, clusterPort);
+ }
+
+ public String getClusterListenAddress() {
+ return appConfig.getString(Option.CLUSTER_LISTEN_ADDRESS);
+ }
+
+ public void setClusterListenAddress(String clusterListenAddress) {
+ configManager.set(nodeId, Option.CLUSTER_LISTEN_ADDRESS, clusterListenAddress);
+ }
+
+ public int getClusterListenPort() {
+ return appConfig.getInt(Option.CLUSTER_LISTEN_PORT);
+ }
+
+ public void setClusterListenPort(int clusterListenPort) {
+ configManager.set(nodeId, Option.CLUSTER_LISTEN_PORT, clusterListenPort);
+ }
+
+ public String getClusterPublicAddress() {
+ return appConfig.getString(Option.CLUSTER_PUBLIC_ADDRESS);
+ }
+
+ public void setClusterPublicAddress(String clusterPublicAddress) {
+ configManager.set(nodeId, Option.CLUSTER_PUBLIC_ADDRESS, clusterPublicAddress);
+ }
+
+ public int getClusterPublicPort() {
+ return appConfig.getInt(Option.CLUSTER_PUBLIC_PORT);
+ }
+
+ public void setClusterPublicPort(int clusterPublicPort) {
+ configManager.set(nodeId, Option.CLUSTER_PUBLIC_PORT, clusterPublicPort);
+ }
+
+ public String getNodeId() {
+ return appConfig.getString(Option.NODE_ID);
+ }
+
+ public void setNodeId(String nodeId) {
+ configManager.set(nodeId, Option.NODE_ID, nodeId);
+ }
+
+ public String getDataListenAddress() {
+ return appConfig.getString(Option.DATA_LISTEN_ADDRESS);
+ }
+
+ public void setDataListenAddress(String dataListenAddress) {
+ configManager.set(nodeId, Option.DATA_LISTEN_ADDRESS, dataListenAddress);
+ }
+
+ public int getDataListenPort() {
+ return appConfig.getInt(Option.DATA_LISTEN_PORT);
+ }
+
+ public void setDataListenPort(int dataListenPort) {
+ configManager.set(nodeId, Option.DATA_LISTEN_PORT, dataListenPort);
+ }
+
+ public String getDataPublicAddress() {
+ return appConfig.getString(Option.DATA_PUBLIC_ADDRESS);
+ }
+
+ public void setDataPublicAddress(String dataPublicAddress) {
+ configManager.set(nodeId, Option.DATA_PUBLIC_ADDRESS, dataPublicAddress);
+ }
+
+ public int getDataPublicPort() {
+ return appConfig.getInt(Option.DATA_PUBLIC_PORT);
+ }
+
+ public void setDataPublicPort(int dataPublicPort) {
+ configManager.set(nodeId, Option.DATA_PUBLIC_PORT, dataPublicPort);
+ }
+
+ public String getResultListenAddress() {
+ return appConfig.getString(Option.RESULT_LISTEN_ADDRESS);
+ }
+
+ public void setResultListenAddress(String resultListenAddress) {
+ configManager.set(nodeId, Option.RESULT_LISTEN_ADDRESS, resultListenAddress);
+ }
+
+ public int getResultListenPort() {
+ return appConfig.getInt(Option.RESULT_LISTEN_PORT);
+ }
+
+ public void setResultListenPort(int resultListenPort) {
+ configManager.set(nodeId, Option.RESULT_LISTEN_PORT, resultListenPort);
+ }
+
+ public String getResultPublicAddress() {
+ return appConfig.getString(Option.RESULT_PUBLIC_ADDRESS);
+ }
+
+ public void setResultPublicAddress(String resultPublicAddress) {
+ configManager.set(nodeId, Option.RESULT_PUBLIC_ADDRESS, resultPublicAddress);
+ }
+
+ public int getResultPublicPort() {
+ return appConfig.getInt(Option.RESULT_PUBLIC_PORT);
+ }
+
+ public void setResultPublicPort(int resultPublicPort) {
+ configManager.set(nodeId, Option.RESULT_PUBLIC_PORT, resultPublicPort);
+ }
+
+ public String getMessagingListenAddress() {
+ return appConfig.getString(Option.MESSAGING_LISTEN_ADDRESS);
+ }
+
+ public void setMessagingListenAddress(String messagingListenAddress) {
+ configManager.set(nodeId, Option.MESSAGING_LISTEN_ADDRESS, messagingListenAddress);
+ }
+
+ public int getMessagingListenPort() {
+ return appConfig.getInt(Option.MESSAGING_LISTEN_PORT);
+ }
+
+ public void setMessagingListenPort(int messagingListenPort) {
+ configManager.set(nodeId, Option.MESSAGING_LISTEN_PORT, messagingListenPort);
+ }
+
+ public String getMessagingPublicAddress() {
+ return appConfig.getString(Option.MESSAGING_PUBLIC_ADDRESS);
+ }
+
+ public void setMessagingPublicAddress(String messagingPublicAddress) {
+ configManager.set(nodeId, Option.MESSAGING_PUBLIC_ADDRESS, messagingPublicAddress);
+ }
+
+ public int getMessagingPublicPort() {
+ return appConfig.getInt(Option.MESSAGING_PUBLIC_PORT);
+ }
+
+ public void setMessagingPublicPort(int messagingPublicPort) {
+ configManager.set(nodeId, Option.MESSAGING_PUBLIC_PORT, messagingPublicPort);
+ }
+
+ public int getClusterConnectRetries() {
+ return appConfig.getInt(Option.CLUSTER_CONNECT_RETRIES);
+ }
+
+ public void setClusterConnectRetries(int clusterConnectRetries) {
+ configManager.set(nodeId, Option.CLUSTER_CONNECT_RETRIES, clusterConnectRetries);
+ }
+
+ public String[] getIODevices() {
+ return appConfig.getStringArray(Option.IODEVICES);
+ }
+
+ public void setIODevices(String[] iodevices) {
+ configManager.set(nodeId, Option.IODEVICES, iodevices);
+ }
+
+ public int getNetThreadCount() {
+ return appConfig.getInt(Option.NET_THREAD_COUNT);
+ }
+
+ public void setNetThreadCount(int netThreadCount) {
+ configManager.set(nodeId, Option.NET_THREAD_COUNT, netThreadCount);
+ }
+
+ public int getNetBufferCount() {
+ return appConfig.getInt(Option.NET_BUFFER_COUNT);
+ }
+
+ public void setNetBufferCount(int netBufferCount) {
+ configManager.set(nodeId, Option.NET_BUFFER_COUNT, netBufferCount);
+ }
+
+ public long getResultTTL() {
+ return appConfig.getLong(Option.RESULT_TTL);
+ }
+
+ public void setResultTTL(long resultTTL) {
+ configManager.set(nodeId, Option.RESULT_TTL, resultTTL);
+ }
+
+ public long getResultSweepThreshold() {
+ return appConfig.getLong(Option.RESULT_SWEEP_THRESHOLD);
+ }
+
+ public void setResultSweepThreshold(long resultSweepThreshold) {
+ configManager.set(nodeId, Option.RESULT_SWEEP_THRESHOLD, resultSweepThreshold);
+ }
+
+ public int getResultManagerMemory() {
+ return appConfig.getInt(Option.RESULT_MANAGER_MEMORY);
+ }
+
+ public void setResultManagerMemory(int resultManagerMemory) {
+ configManager.set(nodeId, Option.RESULT_MANAGER_MEMORY, resultManagerMemory);
+ }
+
+ public String getAppClass() {
+ return appConfig.getString(Option.APP_CLASS);
+ }
+
+ public void setAppClass(String appClass) {
+ configManager.set(nodeId, Option.APP_CLASS, appClass);
+ }
+
+ public int getNCServicePid() {
+ return appConfig.getInt(Option.NCSERVICE_PID);
+ }
+
+ public void setNCServicePid(int ncservicePid) {
+ configManager.set(nodeId, Option.NCSERVICE_PID, ncservicePid);
+ }
+
+ public boolean getVirtualNC() {
+ return appConfig.getBoolean(Option.VIRTUAL_NC);
+ }
+
+ public void setVirtualNC(boolean virtualNC) {
+ configManager.set(nodeId, Option.VIRTUAL_NC, virtualNC);
}
}
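Note: the Option enum above overrides a shared option interface (section, description, type, defaultValue, hidden). A minimal sketch that dumps the NC option catalogue using only the methods shown in this diff; the class name DumpNcOptions is illustrative and not part of this change:

    import org.apache.hyracks.control.common.controllers.NCConfig;

    public class DumpNcOptions {
        public static void main(String[] args) {
            for (NCConfig.Option option : NCConfig.Option.values()) {
                if (option.hidden()) {
                    continue; // e.g. VIRTUAL_NC is not advertised
                }
                // section() places NODE_ID under [localnc] and everything else under [nc]
                System.out.println(option.section() + " / " + option
                        + " (default: " + option.defaultValue() + "): " + option.description());
            }
        }
    }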
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
index 148cf18..a7e3fa9 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/pom.xml
@@ -60,7 +60,6 @@
<dependency>
<groupId>args4j</groupId>
<artifactId>args4j</artifactId>
- <version>2.0.12</version>
</dependency>
<dependency>
<groupId>org.apache.hyracks</groupId>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCApplicationEntryPoint.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCApplicationEntryPoint.java
new file mode 100644
index 0000000..d4e67fd
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCApplicationEntryPoint.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.control.nc;
+
+import java.lang.management.ManagementFactory;
+import java.util.Arrays;
+
+import org.apache.hyracks.api.application.INCApplicationContext;
+import org.apache.hyracks.api.application.INCApplicationEntryPoint;
+import org.apache.hyracks.api.config.IConfigManager;
+import org.apache.hyracks.api.config.Section;
+import org.apache.hyracks.api.job.resource.NodeCapacity;
+import org.apache.hyracks.control.common.controllers.CCConfig;
+import org.apache.hyracks.control.common.controllers.ControllerConfig;
+import org.apache.hyracks.control.common.controllers.NCConfig;
+
+public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
+ public static final NCApplicationEntryPoint INSTANCE = new NCApplicationEntryPoint();
+
+ protected NCApplicationEntryPoint() {
+ }
+
+ @Override
+ public void start(INCApplicationContext ncAppCtx, String[] args) throws Exception {
+ if (args.length > 0) {
+ throw new IllegalArgumentException("Unrecognized argument(s): " + Arrays.toString(args));
+ }
+ }
+
+ @Override
+ public void notifyStartupComplete() throws Exception {
+ // no-op
+ }
+
+ @Override
+ public void stop() throws Exception {
+ // no-op
+ }
+
+ @Override
+ public NodeCapacity getCapacity() {
+ int allCores = ManagementFactory.getOperatingSystemMXBean().getAvailableProcessors();
+ return new NodeCapacity(Runtime.getRuntime().maxMemory(), allCores > 1 ? allCores - 1 : allCores);
+ }
+
+ @Override
+ public void registerConfigOptions(IConfigManager configManager) {
+ configManager.addIniParamOptions(ControllerConfig.Option.CONFIG_FILE, ControllerConfig.Option.CONFIG_FILE_URL);
+ configManager.addCmdLineSections(Section.NC, Section.COMMON, Section.LOCALNC);
+ configManager.setUsageFilter(getUsageFilter());
+ configManager.register(ControllerConfig.Option.class, CCConfig.Option.class, NCConfig.Option.class);
+ }
+}
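An application can hook its own options into the same ConfigManager by overriding registerConfigOptions. A hypothetical sketch extending the default entry point above; MyAppConfig.Option stands in for an application-defined option enum and is not part of this change:

    import org.apache.hyracks.api.application.INCApplicationContext;
    import org.apache.hyracks.api.config.IConfigManager;
    import org.apache.hyracks.control.nc.NCApplicationEntryPoint;

    public class MyAppEntryPoint extends NCApplicationEntryPoint {
        @Override
        public void registerConfigOptions(IConfigManager configManager) {
            super.registerConfigOptions(configManager);        // keep the NC/CC/common options registered above
            configManager.register(MyAppConfig.Option.class);  // hypothetical application-specific options
        }

        @Override
        public void start(INCApplicationContext ncAppCtx, String[] args) throws Exception {
            // application bootstrap goes here; args are the app args collected by the ConfigManager
        }
    }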
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
index 2323d71..b52064e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
@@ -18,28 +18,32 @@
*/
package org.apache.hyracks.control.nc;
+import java.io.IOException;
+import java.util.Arrays;
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.hyracks.api.application.INCApplicationEntryPoint;
+import org.apache.hyracks.control.common.config.ConfigManager;
+import org.apache.hyracks.control.common.config.ConfigUtils;
import org.apache.hyracks.control.common.controllers.NCConfig;
-import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.CmdLineException;
+@SuppressWarnings("InfiniteLoopStatement")
public class NCDriver {
private static final Logger LOGGER = Logger.getLogger(NCDriver.class.getName());
- public static void main(String args[]) throws Exception {
+ private NCDriver() {
+ }
+
+ public static void main(String[] args) {
try {
- NCConfig ncConfig = new NCConfig();
- CmdLineParser cp = new CmdLineParser(ncConfig);
- try {
- cp.parseArgument(args);
- } catch (Exception e) {
- e.printStackTrace();
- cp.printUsage(System.err);
- System.exit(1);
- }
- ncConfig.loadConfigAndApplyDefaults();
- final NodeControllerService ncService = new NodeControllerService(ncConfig);
+ final String nodeId = ConfigUtils.getOptionValue(args, NCConfig.Option.NODE_ID);
+ final ConfigManager configManager = new ConfigManager(args);
+ INCApplicationEntryPoint appEntryPoint = getAppEntryPoint(args);
+ appEntryPoint.registerConfigOptions(configManager);
+ NCConfig ncConfig = new NCConfig(nodeId, configManager);
+ final NodeControllerService ncService = new NodeControllerService(ncConfig, appEntryPoint);
if (LOGGER.isLoggable(Level.SEVERE)) {
LOGGER.severe("Setting uncaught exception handler " + ncService.getLifeCycleComponentManager());
}
@@ -49,9 +53,20 @@
while (true) {
Thread.sleep(10000);
}
+ } catch (CmdLineException e) {
+ LOGGER.log(Level.FINE, "Exception parsing command line: " + Arrays.toString(args), e);
+ System.exit(2);
} catch (Exception e) {
- e.printStackTrace();
+ LOGGER.log(Level.SEVERE, "Exiting NCDriver due to exception", e);
System.exit(1);
}
}
+
+ private static INCApplicationEntryPoint getAppEntryPoint(String[] args)
+ throws ClassNotFoundException, InstantiationException, IllegalAccessException, IOException {
+ // determine app class so that we can use the correct implementation of the configuration...
+ String appClassName = ConfigUtils.getOptionValue(args, NCConfig.Option.APP_CLASS);
+ return appClassName != null ? (INCApplicationEntryPoint) (Class.forName(appClassName)).newInstance()
+ : NCApplicationEntryPoint.INSTANCE;
+ }
}
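The driver now pre-reads the node id and app class from the raw arguments, lets the entry point register its options, and only then builds the NCConfig. A hedged smoke-test sketch exercising the renamed flags from Java; the values are illustrative, and main() blocks indefinitely (or calls System.exit on a parse error):

    import org.apache.hyracks.control.nc.NCDriver;

    public class LaunchNcSketch {
        public static void main(String[] args) {
            // Renamed flags (formerly -cc-host / -cc-port / -data-ip-address); values are illustrative only.
            NCDriver.main(new String[] {
                    "-node-id", "nc1",
                    "-cluster-address", "127.0.0.1",
                    "-cluster-port", "39001",
                    "-data-listen-address", "127.0.0.1"
            });
        }
    }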
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
index bf0ddb6..2ee9161 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
@@ -19,6 +19,7 @@
package org.apache.hyracks.control.nc;
import java.io.File;
+import java.io.IOException;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
@@ -51,11 +52,11 @@
import org.apache.hyracks.api.io.IODeviceHandle;
import org.apache.hyracks.api.job.ActivityClusterGraph;
import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.resource.NodeCapacity;
import org.apache.hyracks.api.lifecycle.ILifeCycleComponentManager;
import org.apache.hyracks.api.lifecycle.LifeCycleComponentManager;
import org.apache.hyracks.api.service.IControllerService;
import org.apache.hyracks.control.common.base.IClusterController;
+import org.apache.hyracks.control.common.config.ConfigManager;
import org.apache.hyracks.control.common.context.ServerContext;
import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.hyracks.control.common.controllers.NodeParameters;
@@ -84,6 +85,7 @@
import org.apache.hyracks.ipc.impl.IPCSystem;
import org.apache.hyracks.net.protocols.muxdemux.FullFrameChannelInterfaceFactory;
import org.apache.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
+import org.kohsuke.args4j.CmdLineException;
public class NodeControllerService implements IControllerService {
private static final Logger LOGGER = Logger.getLogger(NodeControllerService.class.getName());
@@ -130,7 +132,7 @@
private NCApplicationContext appCtx;
- private INCApplicationEntryPoint ncAppEntryPoint;
+ private final INCApplicationEntryPoint ncAppEntryPoint;
private final ILifeCycleComponentManager lccm;
@@ -154,13 +156,25 @@
private MessagingNetworkManager messagingNetManager;
- public NodeControllerService(NCConfig ncConfig) throws Exception {
- this.ncConfig = ncConfig;
- id = ncConfig.nodeId;
+ private final ConfigManager configManager;
- ioManager = new IOManager(IODeviceHandle.getDevices(ncConfig.ioDevices));
+ public NodeControllerService(NCConfig config) throws Exception {
+ this(config, getApplicationEntryPoint(config));
+ }
+
+ public NodeControllerService(NCConfig config, INCApplicationEntryPoint aep) throws IOException, CmdLineException {
+ this.ncConfig = config;
+ this.configManager = ncConfig.getConfigManager();
+ if (aep == null) {
+ throw new IllegalArgumentException("INCApplicationEntryPoint cannot be null");
+ }
+ configManager.processConfig();
+ this.ncAppEntryPoint = aep;
+ id = ncConfig.getNodeId();
+
+ ioManager = new IOManager(IODeviceHandle.getDevices(ncConfig.getIODevices()));
if (id == null) {
- throw new Exception("id not set");
+ throw new HyracksException("id not set");
}
lccm = new LifeCycleComponentManager();
@@ -224,14 +238,16 @@
private void init() throws Exception {
ioManager.setExecutor(executor);
- datasetPartitionManager = new DatasetPartitionManager(this, executor, ncConfig.resultManagerMemory,
- ncConfig.resultTTL, ncConfig.resultSweepThreshold);
- datasetNetworkManager = new DatasetNetworkManager(ncConfig.resultIPAddress, ncConfig.resultPort,
- datasetPartitionManager, ncConfig.nNetThreads, ncConfig.nNetBuffers, ncConfig.resultPublicIPAddress,
- ncConfig.resultPublicPort, FullFrameChannelInterfaceFactory.INSTANCE);
- if (ncConfig.messagingIPAddress != null && appCtx.getMessagingChannelInterfaceFactory() != null) {
- messagingNetManager = new MessagingNetworkManager(this, ncConfig.messagingIPAddress, ncConfig.messagingPort,
- ncConfig.nNetThreads, ncConfig.messagingPublicIPAddress, ncConfig.messagingPublicPort,
+ datasetPartitionManager = new DatasetPartitionManager(this, executor, ncConfig.getResultManagerMemory(),
+ ncConfig.getResultTTL(), ncConfig.getResultSweepThreshold());
+ datasetNetworkManager = new DatasetNetworkManager(ncConfig.getResultListenAddress(),
+ ncConfig.getResultListenPort(), datasetPartitionManager, ncConfig.getNetThreadCount(),
+ ncConfig.getNetBufferCount(), ncConfig.getResultPublicAddress(), ncConfig.getResultPublicPort(),
+ FullFrameChannelInterfaceFactory.INSTANCE);
+ if (ncConfig.getMessagingListenAddress() != null && appCtx.getMessagingChannelInterfaceFactory() != null) {
+ messagingNetManager = new MessagingNetworkManager(this, ncConfig.getMessagingListenAddress(),
+ ncConfig.getMessagingListenPort(), ncConfig.getNetThreadCount(),
+ ncConfig.getMessagingPublicAddress(), ncConfig.getMessagingPublicPort(),
appCtx.getMessagingChannelInterfaceFactory());
}
}
@@ -239,12 +255,13 @@
@Override
public void start() throws Exception {
LOGGER.log(Level.INFO, "Starting NodeControllerService");
- ipc = new IPCSystem(new InetSocketAddress(ncConfig.clusterNetIPAddress, ncConfig.clusterNetPort),
+ ipc = new IPCSystem(new InetSocketAddress(ncConfig.getClusterListenAddress(), ncConfig.getClusterListenPort()),
new NodeControllerIPCI(this), new CCNCFunctions.SerializerDeserializer());
ipc.start();
partitionManager = new PartitionManager(this);
- netManager = new NetworkManager(ncConfig.dataIPAddress, ncConfig.dataPort, partitionManager,
- ncConfig.nNetThreads, ncConfig.nNetBuffers, ncConfig.dataPublicIPAddress, ncConfig.dataPublicPort,
+ netManager = new NetworkManager(ncConfig.getDataListenAddress(), ncConfig.getDataListenPort(), partitionManager,
+ ncConfig.getNetThreadCount(), ncConfig.getNetBufferCount(), ncConfig.getDataPublicAddress(),
+ ncConfig.getDataPublicPort(),
FullFrameChannelInterfaceFactory.INSTANCE);
netManager.start();
@@ -255,8 +272,9 @@
if (messagingNetManager != null) {
messagingNetManager.start();
}
- IIPCHandle ccIPCHandle = ipc.getHandle(new InetSocketAddress(ncConfig.ccHost, ncConfig.ccPort),
- ncConfig.retries);
+ IIPCHandle ccIPCHandle = ipc.getHandle(
+ new InetSocketAddress(ncConfig.getClusterAddress(), ncConfig.getClusterPort()),
+ ncConfig.getClusterConnectRetries());
this.ccs = new ClusterControllerRemoteProxy(ccIPCHandle);
HeartbeatSchema.GarbageCollectorInfo[] gcInfos = new HeartbeatSchema.GarbageCollectorInfo[gcMXBeans.size()];
for (int i = 0; i < gcInfos.length; ++i) {
@@ -274,10 +292,7 @@
runtimeMXBean.getVmName(), runtimeMXBean.getVmVersion(), runtimeMXBean.getVmVendor(),
runtimeMXBean.getClassPath(), runtimeMXBean.getLibraryPath(), runtimeMXBean.getBootClassPath(),
runtimeMXBean.getInputArguments(), runtimeMXBean.getSystemProperties(), hbSchema, meesagingPort,
- ncAppEntryPoint == null
- ? new NodeCapacity(Runtime.getRuntime().maxMemory(), allCores > 1 ? allCores - 1 : allCores)
- : ncAppEntryPoint.getCapacity(),
- PidHelper.getPid()));
+ ncAppEntryPoint.getCapacity(), PidHelper.getPid()));
synchronized (this) {
while (registrationPending) {
@@ -307,21 +322,12 @@
}
LOGGER.log(Level.INFO, "Started NodeControllerService");
- if (ncAppEntryPoint != null) {
- ncAppEntryPoint.notifyStartupComplete();
- }
+ ncAppEntryPoint.notifyStartupComplete();
}
private void startApplication() throws Exception {
appCtx = new NCApplicationContext(this, serverCtx, ioManager, id, memoryManager, lccm, ncConfig.getAppConfig());
- String className = ncConfig.appNCMainClass;
- if (className != null) {
- Class<?> c = Class.forName(className);
- ncAppEntryPoint = (INCApplicationEntryPoint) c.newInstance();
- String[] args = ncConfig.appArgs == null ? new String[0]
- : ncConfig.appArgs.toArray(new String[ncConfig.appArgs.size()]);
- ncAppEntryPoint.start(appCtx, args);
- }
+ ncAppEntryPoint.start(appCtx, ncConfig.getAppArgsArray());
executor = Executors.newCachedThreadPool(appCtx.getThreadFactory());
}
@@ -341,10 +347,8 @@
messagingNetManager.stop();
}
workQueue.stop();
- if (ncAppEntryPoint != null) {
- ncAppEntryPoint.stop();
- }
- /**
+ ncAppEntryPoint.stop();
+ /*
* Stop heartbeat after NC has stopped to avoid false node failure detection
* on CC if an NC takes a long time to stop.
*/
@@ -525,4 +529,14 @@
public MessagingNetworkManager getMessagingNetworkManager() {
return messagingNetManager;
}
+
+ private static INCApplicationEntryPoint getApplicationEntryPoint(NCConfig config)
+ throws ClassNotFoundException, IllegalAccessException, InstantiationException {
+ if (config.getAppClass() != null) {
+ Class<?> c = Class.forName(config.getAppClass());
+ return (INCApplicationEntryPoint) c.newInstance();
+ } else {
+ return NCApplicationEntryPoint.INSTANCE;
+ }
+ }
}
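With the setter-based NCConfig, a node controller can be embedded programmatically; the constructor now runs ConfigManager.processConfig() itself and resolves the entry point from the APP_CLASS option (or falls back to the default). A minimal sketch, assuming a CC is already listening at the cluster address and port (all values illustrative):

    import org.apache.hyracks.control.common.controllers.NCConfig;
    import org.apache.hyracks.control.nc.NodeControllerService;

    public class EmbedNcSketch {
        public static void main(String[] args) throws Exception {
            NCConfig ncConfig = new NCConfig("nc1");
            ncConfig.setClusterAddress("127.0.0.1");      // where the CC's cluster listener runs
            ncConfig.setClusterPort(39001);
            ncConfig.setClusterListenAddress("127.0.0.1");
            ncConfig.setDataListenAddress("127.0.0.1");
            ncConfig.setResultListenAddress("127.0.0.1");
            ncConfig.setIODevices(new String[] { "/tmp/nc1/iodevice" });
            // Pass an INCApplicationEntryPoint as a second argument to override the resolved entry point.
            NodeControllerService nc = new NodeControllerService(ncConfig);
            nc.start();
        }
    }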
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/application/NCApplicationContext.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/application/NCApplicationContext.java
index f52099d..6d549c2 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/application/NCApplicationContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/application/NCApplicationContext.java
@@ -22,10 +22,10 @@
import java.io.OutputStream;
import java.io.Serializable;
-import org.apache.hyracks.api.application.IApplicationConfig;
import org.apache.hyracks.api.application.INCApplicationContext;
import org.apache.hyracks.api.application.IStateDumpHandler;
import org.apache.hyracks.api.comm.IChannelInterfaceFactory;
+import org.apache.hyracks.api.config.IApplicationConfig;
import org.apache.hyracks.api.lifecycle.ILifeCycleComponentManager;
import org.apache.hyracks.api.resources.memory.IMemoryManager;
import org.apache.hyracks.api.service.IControllerService;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
index 8d9a93b..ae7f272 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/pom.xml
@@ -31,9 +31,6 @@
<dependency>
<groupId>args4j</groupId>
<artifactId>args4j</artifactId>
- <version>2.0.12</version>
- <type>jar</type>
- <scope>compile</scope>
</dependency>
<dependency>
<groupId>org.ini4j</groupId>
@@ -42,6 +39,11 @@
</dependency>
<dependency>
<groupId>org.apache.hyracks</groupId>
+ <artifactId>hyracks-api</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hyracks</groupId>
<artifactId>hyracks-control-common</artifactId>
<version>${project.version}</version>
</dependency>
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java
index 9b00cc2..6b11ecc 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCService.java
@@ -23,6 +23,7 @@
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.StringReader;
+import java.io.StringWriter;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.net.InetAddress;
@@ -35,7 +36,9 @@
import java.util.logging.Logger;
import org.apache.commons.lang3.SystemUtils;
-import org.apache.hyracks.control.common.controllers.IniUtils;
+import org.apache.hyracks.control.common.config.ConfigUtils;
+import org.apache.hyracks.control.common.controllers.NCConfig;
+import org.apache.hyracks.api.config.Section;
import org.apache.hyracks.control.common.controllers.ServiceConstants;
import org.apache.hyracks.control.common.controllers.ServiceConstants.ServiceCommand;
import org.ini4j.Ini;
@@ -85,7 +88,7 @@
// Find the command to run. For now, we allow overriding the name, but
// still assume it's located in the bin/ directory of the deployment.
// Even this is likely more configurability than we need.
- String command = IniUtils.getString(ini, nodeSection, "command", "hyracksnc");
+ String command = ConfigUtils.getString(ini, nodeSection, NCConfig.Option.COMMAND.ini(), "hyracksnc");
// app.home is specified by the Maven appassembler plugin. If it isn't set,
// fall back to user's home dir. Again this is likely more flexibility
// than we need.
@@ -112,7 +115,7 @@
}
private static void configEnvironment(Map<String, String> env) {
- String jvmargs = IniUtils.getString(ini, nodeSection, "jvm.args", null);
+ String jvmargs = ConfigUtils.getString(ini, nodeSection, NCConfig.Option.JVM_ARGS.ini(), null);
if (jvmargs != null) {
LOGGER.info("Using JAVA_OPTS from conf file (jvm.args)");
} else {
@@ -188,7 +191,13 @@
return retval == 0;
} catch (Exception e) {
if (LOGGER.isLoggable(Level.SEVERE)) {
- LOGGER.log(Level.SEVERE, "Configuration from CC broken", e);
+ StringWriter sw = new StringWriter();
+ try {
+ ini.store(sw);
+ LOGGER.log(Level.SEVERE, "Configuration from CC broken: \n" + sw.toString(), e);
+ } catch (IOException e1) {
+ LOGGER.log(Level.SEVERE, "Configuration from CC broken, failed to serialize", e1);
+ }
}
return false;
}
@@ -213,7 +222,7 @@
case START_NC:
String iniString = ois.readUTF();
ini = new Ini(new StringReader(iniString));
- ncId = IniUtils.getString(ini, "localnc", "id", "");
+ ncId = ConfigUtils.getString(ini, Section.LOCALNC, NCConfig.Option.NODE_ID, "");
nodeSection = "nc/" + ncId;
return launchNCProcess();
case TERMINATE:
@@ -272,7 +281,7 @@
try (ServerSocket listener = new ServerSocket(port, 5, addr)) {
boolean launched = false;
while (!launched) {
- LOGGER.info("Waiting for connection from CC on " + addr + ":" + port);
+ LOGGER.info("Waiting for connection from CC on " + (addr == null ? "*" : addr) + ":" + port);
try (Socket socket = listener.accept()) {
// QQQ Because acceptConnection() doesn't return if the
// service is started appropriately, the socket remains
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java
index 91c9f6d..10fa679 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java
@@ -18,7 +18,7 @@
*/
package org.apache.hyracks.control.nc.service;
-import org.apache.hyracks.control.common.controllers.IniUtils;
+import org.apache.hyracks.control.common.config.ConfigUtils;
import org.ini4j.Ini;
import org.kohsuke.args4j.Option;
@@ -58,10 +58,10 @@
* It does not apply defaults or any logic.
*/
private void loadINIFile() throws IOException {
- ini = IniUtils.loadINIFile(configFile);
- address = IniUtils.getString(ini, "ncservice", "address", address);
- port = IniUtils.getInt(ini, "ncservice", "port", port);
- logdir = IniUtils.getString(ini, "ncservice", "logdir", logdir);
+ ini = ConfigUtils.loadINIFile(configFile);
+ address = ConfigUtils.getString(ini, "ncservice", "address", address);
+ port = ConfigUtils.getInt(ini, "ncservice", "port", port);
+ logdir = ConfigUtils.getString(ini, "ncservice", "logdir", logdir);
}
/**
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
index 6cfaaa5..e7ed599 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/pom.xml
@@ -47,7 +47,7 @@
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
- <excludes>
+ <excludes combine.children="append">
<exclude>src/test/resources/data/beer.txt</exclude>
</excludes>
</configuration>
diff --git a/hyracks-fullstack/hyracks/hyracks-dist/pom.xml b/hyracks-fullstack/hyracks/hyracks-dist/pom.xml
index b88d292..7b6d7ec 100644
--- a/hyracks-fullstack/hyracks/hyracks-dist/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-dist/pom.xml
@@ -88,7 +88,7 @@
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
- <excludes>
+ <excludes combine.children="append">
<exclude>src/main/resources/conf/master</exclude>
<exclude>src/main/resources/conf/slaves</exclude>
</excludes>
diff --git a/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startDebugNc.sh b/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startDebugNc.sh
index 4d2fc51..9794b07 100755
--- a/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startDebugNc.sh
+++ b/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startDebugNc.sh
@@ -65,4 +65,4 @@
cd $NCTMP_DIR2
#Launch hyracks nc
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS2}" &> $NCLOGS_DIR2/$NODEID.log &
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cluster-address $CCHOST -cluster-port $CC_CLUSTERPORT -address $IPADDR -data-listen-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS2}" &> $NCLOGS_DIR2/$NODEID.log &
diff --git a/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startcc.sh b/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startcc.sh
index 5fa2b86..1bbbe10 100755
--- a/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startcc.sh
+++ b/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startcc.sh
@@ -42,8 +42,8 @@
chmod -R 755 $HYRACKS_HOME
if [ -f "conf/topology.xml" ]; then
#Launch hyracks cc script with topology
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 -cluster-topology "conf/topology.xml" &> $CCLOGS_DIR/cc.log &
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-listen-address $CCHOST -address $CCHOST -client-listen-port $CC_CLIENTPORT -cluster-listen-port $CC_CLUSTERPORT -heartbeat-max-misses 999999 -job-history-size 0 -cluster-topology "conf/topology.xml" &> $CCLOGS_DIR/cc.log &
else
#Launch hyracks cc script without topology
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 &> $CCLOGS_DIR/cc.log &
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-listen-address $CCHOST -address $CCHOST -client-listen-port $CC_CLIENTPORT -cluster-listen-port $CC_CLUSTERPORT -heartbeat-max-misses 999999 -job-history-size 0 &> $CCLOGS_DIR/cc.log &
fi
diff --git a/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startnc.sh b/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startnc.sh
index d09b9c1..62d671f 100755
--- a/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startnc.sh
+++ b/hyracks-fullstack/hyracks/hyracks-dist/src/main/resources/bin/startnc.sh
@@ -64,4 +64,4 @@
cd $NCTMP_DIR
#Launch hyracks nc
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
+$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cluster-address $CCHOST -cluster-port $CC_CLUSTERPORT -address $IPADDR -data-listen-address $IPADDR -result-listen-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
index db62a85..2486fe8 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
@@ -61,7 +61,6 @@
<dependency>
<groupId>args4j</groupId>
<artifactId>args4j</artifactId>
- <version>2.0.12</version>
</dependency>
<dependency>
<groupId>org.apache.hyracks</groupId>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/NCApplicationEntryPoint.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/NCApplicationEntryPoint.java
index 5180f23..eec28a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/NCApplicationEntryPoint.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/NCApplicationEntryPoint.java
@@ -20,6 +20,7 @@
import org.apache.hyracks.api.application.INCApplicationContext;
import org.apache.hyracks.api.application.INCApplicationEntryPoint;
+import org.apache.hyracks.api.config.IConfigManager;
import org.apache.hyracks.api.job.resource.NodeCapacity;
public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
@@ -44,4 +45,10 @@
public NodeCapacity getCapacity() {
return new NodeCapacity(Runtime.getRuntime().maxMemory(), Runtime.getRuntime().availableProcessors() - 1);
}
+
+ @Override
+ public void registerConfigOptions(IConfigManager configManager) {
+ // no-op
+ }
+
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
index 630b984..b26f00f 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
@@ -41,7 +41,7 @@
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
- <excludes>
+ <excludes combine.children="append">
<exclude>data/**</exclude>
</excludes>
</configuration>
@@ -174,5 +174,10 @@
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.hyracks</groupId>
+ <artifactId>hyracks-util</artifactId>
+ <version>${project.version}</version>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
index a7677f8..82fd737 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -18,6 +18,8 @@
*/
package org.apache.hyracks.tests.integration;
+import static org.apache.hyracks.util.file.FileUtil.joinPath;
+
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
@@ -82,47 +84,43 @@
@BeforeClass
public static void init() throws Exception {
CCConfig ccConfig = new CCConfig();
- ccConfig.clientNetIpAddress = "127.0.0.1";
- ccConfig.clientNetPort = 39000;
- ccConfig.clusterNetIpAddress = "127.0.0.1";
- ccConfig.clusterNetPort = 39001;
- ccConfig.profileDumpPeriod = 10000;
+ ccConfig.setClientListenAddress("127.0.0.1");
+ ccConfig.setClientListenPort(39000);
+ ccConfig.setClusterListenAddress("127.0.0.1");
+ ccConfig.setClusterListenPort(39001);
+ ccConfig.setProfileDumpPeriod(10000);
FileUtils.deleteQuietly(new File("target" + File.separator + "data"));
- FileUtils.copyDirectory(new File("data"), new File("target" + File.separator + "data"));
+ FileUtils.copyDirectory(new File("data"), new File(joinPath("target", "data")));
File outDir = new File("target" + File.separator + "ClusterController");
outDir.mkdirs();
File ccRoot = File.createTempFile(AbstractIntegrationTest.class.getName(), ".data", outDir);
ccRoot.delete();
ccRoot.mkdir();
- ccConfig.ccRoot = ccRoot.getAbsolutePath();
+ ccConfig.setRootDir(ccRoot.getAbsolutePath());
cc = new ClusterControllerService(ccConfig);
cc.start();
- NCConfig ncConfig1 = new NCConfig();
- ncConfig1.ccHost = "localhost";
- ncConfig1.ccPort = 39001;
- ncConfig1.clusterNetIPAddress = "127.0.0.1";
- ncConfig1.dataIPAddress = "127.0.0.1";
- ncConfig1.resultIPAddress = "127.0.0.1";
- ncConfig1.nodeId = NC1_ID;
- ncConfig1.ioDevices = System.getProperty("user.dir") + File.separator + "target" + File.separator + "data"
- + File.separator + "device0";
+ NCConfig ncConfig1 = new NCConfig(NC1_ID);
+ ncConfig1.setClusterAddress("localhost");
+ ncConfig1.setClusterPort(39001);
+ ncConfig1.setClusterListenAddress("127.0.0.1");
+ ncConfig1.setDataListenAddress("127.0.0.1");
+ ncConfig1.setResultListenAddress("127.0.0.1");
+ ncConfig1.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data", "device0") });
nc1 = new NodeControllerService(ncConfig1);
nc1.start();
- NCConfig ncConfig2 = new NCConfig();
- ncConfig2.ccHost = "localhost";
- ncConfig2.ccPort = 39001;
- ncConfig2.clusterNetIPAddress = "127.0.0.1";
- ncConfig2.dataIPAddress = "127.0.0.1";
- ncConfig2.resultIPAddress = "127.0.0.1";
- ncConfig2.nodeId = NC2_ID;
- ncConfig2.ioDevices = System.getProperty("user.dir") + File.separator + "target" + File.separator + "data"
- + File.separator + "device1";
+ NCConfig ncConfig2 = new NCConfig(NC2_ID);
+ ncConfig2.setClusterAddress("localhost");
+ ncConfig2.setClusterPort(39001);
+ ncConfig2.setClusterListenAddress("127.0.0.1");
+ ncConfig2.setDataListenAddress("127.0.0.1");
+ ncConfig2.setResultListenAddress("127.0.0.1");
+ ncConfig2.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data", "device1") });
nc2 = new NodeControllerService(ncConfig2);
nc2.start();
- hcc = new HyracksConnection(ccConfig.clientNetIpAddress, ccConfig.clientNetPort);
+ hcc = new HyracksConnection(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort());
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
index 3d6ac00..f7959d8 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
@@ -26,9 +26,9 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.commons.io.FileUtils;
-import org.apache.hyracks.api.application.ICCApplicationContext;
-import org.apache.hyracks.api.application.ICCApplicationEntryPoint;
import org.apache.hyracks.api.client.HyracksConnection;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
@@ -42,6 +42,7 @@
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.api.job.resource.IJobCapacityController;
import org.apache.hyracks.client.dataset.HyracksDataset;
+import org.apache.hyracks.control.cc.CCApplicationEntryPoint;
import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.common.controllers.CCConfig;
import org.apache.hyracks.control.common.controllers.NCConfig;
@@ -54,9 +55,6 @@
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ArrayNode;
-
public abstract class AbstractMultiNCIntegrationTest {
private static final Logger LOGGER = Logger.getLogger(AbstractMultiNCIntegrationTest.class.getName());
@@ -82,18 +80,18 @@
@BeforeClass
public static void init() throws Exception {
CCConfig ccConfig = new CCConfig();
- ccConfig.clientNetIpAddress = "127.0.0.1";
- ccConfig.clientNetPort = 39000;
- ccConfig.clusterNetIpAddress = "127.0.0.1";
- ccConfig.clusterNetPort = 39001;
- ccConfig.profileDumpPeriod = 10000;
+ ccConfig.setClientListenAddress("127.0.0.1");
+ ccConfig.setClientListenPort(39000);
+ ccConfig.setClusterListenAddress("127.0.0.1");
+ ccConfig.setClusterListenPort(39001);
+ ccConfig.setProfileDumpPeriod(10000);
File outDir = new File("target" + File.separator + "ClusterController");
outDir.mkdirs();
File ccRoot = File.createTempFile(AbstractMultiNCIntegrationTest.class.getName(), ".data", outDir);
ccRoot.delete();
ccRoot.mkdir();
- ccConfig.ccRoot = ccRoot.getAbsolutePath();
- ccConfig.appCCMainClass = DummyApplicationEntryPoint.class.getName();
+ ccConfig.setRootDir(ccRoot.getAbsolutePath());
+ ccConfig.setAppClass(DummyApplicationEntryPoint.class.getName());
cc = new ClusterControllerService(ccConfig);
cc.start();
@@ -102,19 +100,18 @@
File ioDev = new File("target" + File.separator + ASTERIX_IDS[i] + File.separator + "ioDevice");
FileUtils.forceMkdir(ioDev);
FileUtils.copyDirectory(new File("data" + File.separator + "device0"), ioDev);
- NCConfig ncConfig = new NCConfig();
- ncConfig.ccHost = "localhost";
- ncConfig.ccPort = 39001;
- ncConfig.clusterNetIPAddress = "127.0.0.1";
- ncConfig.dataIPAddress = "127.0.0.1";
- ncConfig.resultIPAddress = "127.0.0.1";
- ncConfig.nodeId = ASTERIX_IDS[i];
- ncConfig.ioDevices = ioDev.getAbsolutePath();
+ NCConfig ncConfig = new NCConfig(ASTERIX_IDS[i]);
+ ncConfig.setClusterAddress("localhost");
+ ncConfig.setClusterPort(39001);
+ ncConfig.setClusterListenAddress("127.0.0.1");
+ ncConfig.setDataListenAddress("127.0.0.1");
+ ncConfig.setResultListenAddress("127.0.0.1");
+ ncConfig.setIODevices(new String [] { ioDev.getAbsolutePath() });
asterixNCs[i] = new NodeControllerService(ncConfig);
asterixNCs[i].start();
}
- hcc = new HyracksConnection(ccConfig.clientNetIpAddress, ccConfig.clientNetPort);
+ hcc = new HyracksConnection(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort());
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
}
@@ -219,22 +216,7 @@
return tempFile;
}
- public static class DummyApplicationEntryPoint implements ICCApplicationEntryPoint {
-
- @Override
- public void start(ICCApplicationContext ccAppCtx, String[] args) throws Exception {
-
- }
-
- @Override
- public void stop() throws Exception {
-
- }
-
- @Override
- public void startupCompleted() throws Exception {
-
- }
+ public static class DummyApplicationEntryPoint extends CCApplicationEntryPoint {
@Override
public IJobCapacityController getJobCapacityController() {
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/NodesAPIIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/NodesAPIIntegrationTest.java
index 6f05600..672d2c4 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/NodesAPIIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/NodesAPIIntegrationTest.java
@@ -48,7 +48,7 @@
"net-signaling-bytes-written", "dataset-net-payload-bytes-read", "dataset-net-payload-bytes-written",
"dataset-net-signaling-bytes-read", "dataset-net-signaling-bytes-written", "ipc-messages-sent",
"ipc-message-bytes-sent", "ipc-messages-received", "ipc-message-bytes-received", "disk-reads",
- "disk-writes", "ini" };
+ "disk-writes", "config" };
public static final String ROOT_PATH = "/rest/nodes";
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/PredistributedJobsTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/PredistributedJobsTest.java
index 2509515..f911a75 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/PredistributedJobsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/PredistributedJobsTest.java
@@ -18,6 +18,7 @@
*/
package org.apache.hyracks.tests.integration;
+import static org.apache.hyracks.util.file.FileUtil.joinPath;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.verify;
@@ -54,50 +55,46 @@
@BeforeClass
public static void init() throws Exception {
CCConfig ccConfig = new CCConfig();
- ccConfig.clientNetIpAddress = "127.0.0.1";
- ccConfig.clientNetPort = 39000;
- ccConfig.clusterNetIpAddress = "127.0.0.1";
- ccConfig.clusterNetPort = 39001;
- ccConfig.profileDumpPeriod = 10000;
- FileUtils.deleteQuietly(new File("target" + File.separator + "data"));
- FileUtils.copyDirectory(new File("data"), new File("target" + File.separator + "data"));
+ ccConfig.setClientListenAddress("127.0.0.1");
+ ccConfig.setClientListenPort(39000);
+ ccConfig.setClusterListenAddress("127.0.0.1");
+ ccConfig.setClusterListenPort(39001);
+ ccConfig.setProfileDumpPeriod(10000);
+ FileUtils.deleteQuietly(new File(joinPath("target", "data")));
+ FileUtils.copyDirectory(new File("data"), new File(joinPath("target", "data")));
File outDir = new File("target" + File.separator + "ClusterController");
outDir.mkdirs();
File ccRoot = File.createTempFile(AbstractIntegrationTest.class.getName(), ".data", outDir);
ccRoot.delete();
ccRoot.mkdir();
- ccConfig.ccRoot = ccRoot.getAbsolutePath();
+ ccConfig.setRootDir(ccRoot.getAbsolutePath());
ClusterControllerService ccBase = new ClusterControllerService(ccConfig);
cc = Mockito.spy(ccBase);
cc.start();
- NCConfig ncConfig1 = new NCConfig();
- ncConfig1.ccHost = "localhost";
- ncConfig1.ccPort = 39001;
- ncConfig1.clusterNetIPAddress = "127.0.0.1";
- ncConfig1.dataIPAddress = "127.0.0.1";
- ncConfig1.resultIPAddress = "127.0.0.1";
- ncConfig1.nodeId = NC1_ID;
- ncConfig1.ioDevices = System.getProperty("user.dir") + File.separator + "target" + File.separator + "data"
- + File.separator + "device0";
+ NCConfig ncConfig1 = new NCConfig(NC1_ID);
+ ncConfig1.setClusterAddress("localhost");
+ ncConfig1.setClusterPort(39001);
+ ncConfig1.setClusterListenAddress("127.0.0.1");
+ ncConfig1.setDataListenAddress("127.0.0.1");
+ ncConfig1.setResultListenAddress("127.0.0.1");
+ ncConfig1.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data", "device0") });
NodeControllerService nc1Base = new NodeControllerService(ncConfig1);
nc1 = Mockito.spy(nc1Base);
nc1.start();
- NCConfig ncConfig2 = new NCConfig();
- ncConfig2.ccHost = "localhost";
- ncConfig2.ccPort = 39001;
- ncConfig2.clusterNetIPAddress = "127.0.0.1";
- ncConfig2.dataIPAddress = "127.0.0.1";
- ncConfig2.resultIPAddress = "127.0.0.1";
- ncConfig2.nodeId = NC2_ID;
- ncConfig2.ioDevices = System.getProperty("user.dir") + File.separator + "target" + File.separator + "data"
- + File.separator + "device1";
+ NCConfig ncConfig2 = new NCConfig(NC2_ID);
+ ncConfig2.setClusterAddress("localhost");
+ ncConfig2.setClusterPort(39001);
+ ncConfig2.setClusterListenAddress("127.0.0.1");
+ ncConfig2.setDataListenAddress("127.0.0.1");
+ ncConfig2.setResultListenAddress("127.0.0.1");
+ ncConfig2.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data", "device1") });
NodeControllerService nc2Base = new NodeControllerService(ncConfig2);
nc2 = Mockito.spy(nc2Base);
nc2.start();
- hcc = new HyracksConnection(ccConfig.clientNetIpAddress, ccConfig.clientNetPort);
+ hcc = new HyracksConnection(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort());
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
}
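
The hunk above illustrates the new configuration API: the public CCConfig/NCConfig fields are replaced by setters, the node id moves into the NCConfig constructor, and the NC-side options for reaching the CC are now named cluster address/port. A minimal standalone sketch of the same pattern follows; the port numbers and device path are illustrative, and the imports are assumed to follow the existing control packages (org.apache.hyracks.control.common.controllers for the config classes).

import org.apache.hyracks.control.cc.ClusterControllerService;
import org.apache.hyracks.control.common.controllers.CCConfig;
import org.apache.hyracks.control.common.controllers.NCConfig;
import org.apache.hyracks.control.nc.NodeControllerService;

public class MiniClusterSketch {
    public static void main(String[] args) throws Exception {
        // Cluster controller: listen addresses and ports are now configured via setters.
        CCConfig ccConfig = new CCConfig();
        ccConfig.setClientListenAddress("127.0.0.1");
        ccConfig.setClientListenPort(39000);
        ccConfig.setClusterListenAddress("127.0.0.1");
        ccConfig.setClusterListenPort(39001);
        ClusterControllerService cc = new ClusterControllerService(ccConfig);
        cc.start();

        // Node controller: the node id is a constructor argument, and the CC
        // coordinates are set through setClusterAddress/setClusterPort.
        NCConfig ncConfig = new NCConfig("nc1");
        ncConfig.setClusterAddress("localhost");
        ncConfig.setClusterPort(39001);
        ncConfig.setClusterListenAddress("127.0.0.1");
        ncConfig.setDataListenAddress("127.0.0.1");
        ncConfig.setResultListenAddress("127.0.0.1");
        ncConfig.setIODevices(new String[] { "/tmp/nc1/device0" }); // illustrative path
        NodeControllerService nc = new NodeControllerService(ncConfig);
        nc.start();
    }
}
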
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
index 0e9d24e..42a104d 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/pom.xml
@@ -40,9 +40,10 @@
<artifactId>maven-dependency-plugin</artifactId>
<version>2.10</version>
<configuration>
- <failOnWarning>true</failOnWarning>
- <outputXML>true</outputXML>
- <usedDependencies>org.apache.hyracks:hyracks-control-nc,org.apache.hyracks:hyracks-control-cc</usedDependencies>
+ <usedDependencies combine.children="append">
+ <usedDependency>org.apache.hyracks:hyracks-control-nc</usedDependency>
+ <usedDependency>org.apache.hyracks:hyracks-control-cc</usedDependency>
+ </usedDependencies>
</configuration>
<executions>
<execution>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml
index e1155b7..9003724 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textclient/pom.xml
@@ -51,7 +51,6 @@
<dependency>
<groupId>args4j</groupId>
<artifactId>args4j</artifactId>
- <version>2.0.12</version>
</dependency>
<dependency>
<groupId>org.apache.hyracks</groupId>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml
index 394fa11..ed269ea 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/text-example/textserver/pom.xml
@@ -40,10 +40,11 @@
<artifactId>maven-dependency-plugin</artifactId>
<version>2.10</version>
<configuration>
- <failOnWarning>true</failOnWarning>
- <outputXML>true</outputXML>
- <usedDependencies>
- org.apache.hyracks:hyracks-control-nc,org.apache.hyracks:hyracks-control-cc,org.apache.hyracks:hyracks-dataflow-std,org.apache.hyracks:texthelper
+ <usedDependencies combine.children="append">
+ <usedDependency>org.apache.hyracks:hyracks-control-nc</usedDependency>
+ <usedDependency>org.apache.hyracks:hyracks-control-cc</usedDependency>
+ <usedDependency>org.apache.hyracks:hyracks-dataflow-std</usedDependency>
+ <usedDependency>org.apache.hyracks:texthelper</usedDependency>
</usedDependencies>
</configuration>
<executions>
@@ -103,7 +104,7 @@
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
- <excludes>
+ <excludes combine.children="append">
<exclude>data/file1.txt</exclude>
<exclude>data/file2.txt</exclude>
</excludes>
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
index 5bd7796..f8378c6 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
@@ -45,7 +45,6 @@
<dependency>
<groupId>args4j</groupId>
<artifactId>args4j</artifactId>
- <version>2.0.12</version>
</dependency>
<dependency>
<groupId>org.apache.hyracks</groupId>
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
index b4abc4a..a8778d6 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
@@ -66,7 +66,7 @@
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
- <excludes>
+ <excludes combine.children="append">
<exclude>src/test/resources/data/customer.tbl</exclude>
<exclude>src/test/resources/expected/part-0</exclude>
</excludes>
@@ -406,5 +406,11 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.hyracks</groupId>
+ <artifactId>hyracks-util</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java
index 2307a43..bf7f2a0 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java
@@ -19,8 +19,6 @@
package org.apache.hyracks.hdfs.dataflow;
-import static org.apache.hyracks.test.support.TestUtils.joinPath;
-
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
@@ -61,6 +59,7 @@
import org.apache.hyracks.hdfs.scheduler.Scheduler;
import org.apache.hyracks.hdfs.utils.HyracksUtils;
import org.apache.hyracks.hdfs.utils.TestUtils;
+import org.apache.hyracks.util.file.FileUtil;
/**
* Test the org.apache.hyracks.hdfs.dataflow package,
@@ -69,19 +68,19 @@
@SuppressWarnings({ "deprecation" })
public class DataflowTest extends TestCase {
- protected static final String ACTUAL_RESULT_DIR = joinPath("target", "actual");
- private static final String TEST_RESOURCES = joinPath("src", "test", "resources");
- protected static final String EXPECTED_RESULT_PATH = joinPath(TEST_RESOURCES, "expected");
- private static final String PATH_TO_HADOOP_CONF = joinPath(TEST_RESOURCES, "hadoop", "conf");
- protected static final String BUILD_DIR = joinPath("target", "build");
+ protected static final String ACTUAL_RESULT_DIR = FileUtil.joinPath("target", "actual");
+ private static final String TEST_RESOURCES = FileUtil.joinPath("src", "test", "resources");
+ protected static final String EXPECTED_RESULT_PATH = FileUtil.joinPath(TEST_RESOURCES, "expected");
+ private static final String PATH_TO_HADOOP_CONF = FileUtil.joinPath(TEST_RESOURCES, "hadoop", "conf");
+ protected static final String BUILD_DIR = FileUtil.joinPath("target", "build");
- private static final String DATA_PATH = joinPath(TEST_RESOURCES, "data", "customer.tbl");
+ private static final String DATA_PATH = FileUtil.joinPath(TEST_RESOURCES, "data", "customer.tbl");
protected static final String HDFS_INPUT_PATH = "/customer/";
protected static final String HDFS_OUTPUT_PATH = "/customer_result/";
private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
- private static final String MINIDFS_BASEDIR = joinPath("target", "hdfs");
+ private static final String MINIDFS_BASEDIR = FileUtil.joinPath("target", "hdfs");
private MiniDFSCluster dfsCluster;
private JobConf conf = new JobConf();
@@ -121,8 +120,8 @@
FileSystem lfs = FileSystem.getLocal(new Configuration());
lfs.delete(new Path(BUILD_DIR), true);
- System.setProperty("hadoop.log.dir", joinPath("target", "logs"));
- getConfiguration().set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, MINIDFS_BASEDIR);
+ System.setProperty("hadoop.log.dir", FileUtil.joinPath("target", "logs"));
+ getConfiguration().set("hdfs.minidfs.basedir", MINIDFS_BASEDIR);
dfsCluster = getMiniDFSCluster(getConfiguration(), numberOfNC);
FileSystem dfs = FileSystem.get(getConfiguration());
Path src = new Path(DATA_PATH);
@@ -197,8 +196,8 @@
Path actual = new Path(ACTUAL_RESULT_DIR);
dfs.copyToLocalFile(result, actual);
- TestUtils.compareWithResult(new File(joinPath(EXPECTED_RESULT_PATH, "part-0")), new File(
- joinPath(ACTUAL_RESULT_DIR, "customer_result", "part-0")));
+ TestUtils.compareWithResult(new File(FileUtil.joinPath(EXPECTED_RESULT_PATH, "part-0")), new File(
+ FileUtil.joinPath(ACTUAL_RESULT_DIR, "customer_result", "part-0")));
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/utils/HyracksUtils.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/utils/HyracksUtils.java
index cc32527..1fddc46 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/utils/HyracksUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/utils/HyracksUtils.java
@@ -50,36 +50,33 @@
public static void init() throws Exception {
CCConfig ccConfig = new CCConfig();
- ccConfig.clientNetIpAddress = CC_HOST;
- ccConfig.clusterNetIpAddress = CC_HOST;
- ccConfig.clusterNetPort = TEST_HYRACKS_CC_PORT;
- ccConfig.clientNetPort = TEST_HYRACKS_CC_CLIENT_PORT;
- ccConfig.defaultMaxJobAttempts = 0;
- ccConfig.jobHistorySize = 0;
- ccConfig.profileDumpPeriod = -1;
+ ccConfig.setClientListenAddress(CC_HOST);
+ ccConfig.setClusterListenAddress(CC_HOST);
+ ccConfig.setClusterListenPort(TEST_HYRACKS_CC_PORT);
+ ccConfig.setClientListenPort(TEST_HYRACKS_CC_CLIENT_PORT);
+ ccConfig.setJobHistorySize(0);
+ ccConfig.setProfileDumpPeriod(-1);
// cluster controller
cc = new ClusterControllerService(ccConfig);
cc.start();
// two node controllers
- NCConfig ncConfig1 = new NCConfig();
- ncConfig1.ccHost = "localhost";
- ncConfig1.clusterNetIPAddress = "localhost";
- ncConfig1.ccPort = TEST_HYRACKS_CC_PORT;
- ncConfig1.dataIPAddress = "127.0.0.1";
- ncConfig1.resultIPAddress = "127.0.0.1";
- ncConfig1.nodeId = NC1_ID;
+ NCConfig ncConfig1 = new NCConfig(NC1_ID);
+ ncConfig1.setClusterAddress("localhost");
+ ncConfig1.setClusterListenAddress("localhost");
+ ncConfig1.setClusterPort(TEST_HYRACKS_CC_PORT);
+ ncConfig1.setDataListenAddress("127.0.0.1");
+ ncConfig1.setResultListenAddress("127.0.0.1");
nc1 = new NodeControllerService(ncConfig1);
nc1.start();
- NCConfig ncConfig2 = new NCConfig();
- ncConfig2.ccHost = "localhost";
- ncConfig2.clusterNetIPAddress = "localhost";
- ncConfig2.ccPort = TEST_HYRACKS_CC_PORT;
- ncConfig2.dataIPAddress = "127.0.0.1";
- ncConfig2.resultIPAddress = "127.0.0.1";
- ncConfig2.nodeId = NC2_ID;
+ NCConfig ncConfig2 = new NCConfig(NC2_ID);
+ ncConfig2.setClusterAddress("localhost");
+ ncConfig2.setClusterListenAddress("localhost");
+ ncConfig2.setClusterPort(TEST_HYRACKS_CC_PORT);
+ ncConfig2.setDataListenAddress("127.0.0.1");
+ ncConfig2.setResultListenAddress("127.0.0.1");
nc2 = new NodeControllerService(ncConfig2);
nc2.start();
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java
index cb9deea..e4d9005 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/AbstractServlet.java
@@ -38,7 +38,7 @@
protected final ConcurrentMap<String, Object> ctx;
private final int[] trims;
- public AbstractServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
+ public AbstractServlet(ConcurrentMap<String, Object> ctx, String... paths) {
this.paths = paths;
this.ctx = ctx;
trims = new int[paths.length];
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
index 9ee135b..fe2bcae 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
@@ -64,14 +64,14 @@
IPCConnectionManager(IPCSystem system, InetSocketAddress socketAddress) throws IOException {
this.system = system;
- this.networkThread = new NetworkThread();
- this.networkThread.setPriority(Thread.MAX_PRIORITY);
this.serverSocketChannel = ServerSocketChannel.open();
serverSocketChannel.socket().setReuseAddress(true);
serverSocketChannel.configureBlocking(false);
ServerSocket socket = serverSocketChannel.socket();
socket.bind(socketAddress);
address = new InetSocketAddress(socket.getInetAddress(), socket.getLocalPort());
+ networkThread = new NetworkThread();
+ networkThread.setPriority(Thread.MAX_PRIORITY);
ipcHandleMap = new HashMap<>();
pendingConnections = new ArrayList<>();
workingPendingConnections = new ArrayList<>();
@@ -175,7 +175,7 @@
private final Selector selector;
public NetworkThread() {
- super("IPC Network Listener Thread");
+ super("IPC Network Listener Thread [" + address + "]");
setDaemon(true);
try {
selector = Selector.open();
@@ -323,7 +323,7 @@
failingLoops = 0;
} catch (Exception e) {
int sleepSecs = (int)Math.pow(2, Math.min(11, failingLoops++));
- LOGGER.log(Level.WARNING, "Exception processing message; sleeping " + sleepSecs
+ LOGGER.log(Level.SEVERE, "Exception processing message; sleeping " + sleepSecs
+ " seconds", e);
try {
Thread.sleep(TimeUnit.SECONDS.toMillis(sleepSecs));
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/HyracksCCStartMojo.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/HyracksCCStartMojo.java
index a461064..5a9d9dd 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/HyracksCCStartMojo.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/HyracksCCStartMojo.java
@@ -40,8 +40,8 @@
if (port != 0) {
cmdLineBuffer.append("-port ").append(port);
}
- cmdLineBuffer.append(" -client-net-ip-address 127.0.0.1");
- cmdLineBuffer.append(" -cluster-net-ip-address 127.0.0.1");
+ cmdLineBuffer.append(" -client-listen-address 127.0.0.1");
+ cmdLineBuffer.append(" -address 127.0.0.1");
String args = cmdLineBuffer.toString();
final Process proc = launch(new File(hyracksServerHome, makeScriptName(HYRACKS_CC_SCRIPT)), args, workingDir);
HyracksServiceRegistry.INSTANCE.addServiceProcess(proc);
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/HyracksNCStartMojo.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/HyracksNCStartMojo.java
index 1f0e640..926e108 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/HyracksNCStartMojo.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/HyracksNCStartMojo.java
@@ -55,13 +55,13 @@
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
StringBuilder cmdLineBuffer = new StringBuilder();
- cmdLineBuffer.append(" -cc-host ").append(ccHost);
- cmdLineBuffer.append(" -data-ip-address ").append(dataIpAddress);
+ cmdLineBuffer.append(" -cluster-address ").append(ccHost);
+ cmdLineBuffer.append(" -data-listen-address ").append(dataIpAddress);
cmdLineBuffer.append(" -node-id ").append(nodeId);
- cmdLineBuffer.append(" -cluster-net-ip-address 127.0.0.1");
- cmdLineBuffer.append(" -result-ip-address 127.0.0.1");
+ cmdLineBuffer.append(" -address 127.0.0.1");
+ cmdLineBuffer.append(" -result-listen-address 127.0.0.1");
if (ccPort != 0) {
- cmdLineBuffer.append(" -cc-port ").append(ccPort);
+ cmdLineBuffer.append(" -cluster-port ").append(ccPort);
}
String args = cmdLineBuffer.toString();
final Process proc = launch(new File(hyracksServerHome, makeScriptName(HYRACKS_NC_SCRIPT)), args, workingDir);
diff --git a/hyracks-fullstack/hyracks/hyracks-server/docs/README b/hyracks-fullstack/hyracks/hyracks-server/docs/README
index 06bb1e1..44cdcd8 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/docs/README
+++ b/hyracks-fullstack/hyracks/hyracks-server/docs/README
@@ -19,15 +19,15 @@
The node controller is started by running bin/hyracksnc. It requires at least the following two command line arguments.
- -cc-host VAL : Cluster Controller host name
- -data-ip-address VAL : IP Address to bind data listener
+ -cluster-address VAL : Cluster Controller host name
+ -data-listen-address VAL : IP Address to bind data listener
If the cluster controller was directed to listen on a port other than the default, you will need to pass one more argument to hyracksnc.
- -cc-port N : Cluster Controller port (default: 1099)
+ -cluster-port N : Cluster Controller port (default: 1099)
-The data-ip-address is the interface on which the Node Controller must listen on -- in the event the machine is multi-homed it must listen on an IP that is reachable from
-other Node Controllers. Make sure that the value passed to the data-ip-address is a valid IPv4 address (four octets separated by .).
+The data-listen-address is the interface on which the Node Controller must listen -- in the event the machine is multi-homed, it must listen on an IP that is reachable from
+other Node Controllers. Make sure that the value passed to data-listen-address is a valid IPv4 address (four octets separated by .).
3. Running a job on Hyracks
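
For reference, with the renamed options above, a minimal hyracksnc invocation would take the form (addresses and node id are illustrative): bin/hyracksnc -cluster-address 10.1.0.1 -cluster-port 1099 -data-listen-address 10.1.0.2 -node-id nc1
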
diff --git a/hyracks-fullstack/hyracks/hyracks-server/pom.xml b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
index ded28ad..5e36b8a 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-server/pom.xml
@@ -47,9 +47,9 @@
<artifactId>maven-dependency-plugin</artifactId>
<version>2.10</version>
<configuration>
- <failOnWarning>true</failOnWarning>
- <outputXML>true</outputXML>
- <usedDependencies>org.apache.hyracks:hyracks-control-nc</usedDependencies>
+ <usedDependencies combine.children="append">
+ <usedDependency>org.apache.hyracks:hyracks-control-nc</usedDependency>
+ </usedDependencies>
</configuration>
<executions>
<execution>
diff --git a/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksCCProcess.java b/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksCCProcess.java
index 4a70120..b2aa2d1 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksCCProcess.java
+++ b/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksCCProcess.java
@@ -18,11 +18,11 @@
*/
package org.apache.hyracks.server.process;
-import org.apache.hyracks.control.cc.CCDriver;
-
import java.io.File;
import java.util.List;
+import org.apache.hyracks.control.cc.CCDriver;
+
public class HyracksCCProcess extends HyracksServerProcess {
public HyracksCCProcess(File configFile, File logFile, File appHome, File workingDir) {
@@ -41,5 +41,6 @@
protected void addJvmArgs(List<String> cList) {
// CC needs more than default memory
cList.add("-Xmx1024m");
+ //cList.add("-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java b/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
index 2185826..fda099e 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
+++ b/hyracks-fullstack/hyracks/hyracks-server/src/test/java/org/apache/hyracks/server/test/NCServiceIT.java
@@ -34,8 +34,6 @@
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.apache.hyracks.server.process.HyracksVirtualCluster;
-import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
diff --git a/hyracks-fullstack/hyracks/hyracks-server/src/test/resources/NCServiceIT/cc.conf b/hyracks-fullstack/hyracks/hyracks-server/src/test/resources/NCServiceIT/cc.conf
index 2339efb..69676f7 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/src/test/resources/NCServiceIT/cc.conf
+++ b/hyracks-fullstack/hyracks/hyracks-server/src/test/resources/NCServiceIT/cc.conf
@@ -16,13 +16,15 @@
; under the License.
[nc/red]
-address=127.0.0.1
+address = 127.0.0.1
+#jvm.args=-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5006
[nc/blue]
-address=127.0.0.1
-port=9091
+address = 127.0.0.1
+ncservice.port = 9091
+#jvm.args=-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5007
[cc]
-cluster.address = 127.0.0.1
-http.port = 12345
+address = 127.0.0.1
+console.listen.port = 12345
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml b/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
index 9e0535b..9a51b6c 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/pom.xml
@@ -105,9 +105,5 @@
<artifactId>hyracks-util</artifactId>
<version>${project.version}</version>
</dependency>
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-lang3</artifactId>
- </dependency>
</dependencies>
</project>
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestNCApplicationContext.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestNCApplicationContext.java
index 87739ea..81ee47b 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestNCApplicationContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestNCApplicationContext.java
@@ -21,10 +21,10 @@
import java.io.Serializable;
import java.util.concurrent.ThreadFactory;
-import org.apache.hyracks.api.application.IApplicationConfig;
import org.apache.hyracks.api.application.INCApplicationContext;
import org.apache.hyracks.api.application.IStateDumpHandler;
import org.apache.hyracks.api.comm.IChannelInterfaceFactory;
+import org.apache.hyracks.api.config.IApplicationConfig;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.api.job.IJobSerializerDeserializerContainer;
import org.apache.hyracks.api.lifecycle.ILifeCycleComponentManager;
@@ -32,7 +32,6 @@
import org.apache.hyracks.api.messages.IMessageBroker;
import org.apache.hyracks.api.resources.memory.IMemoryManager;
import org.apache.hyracks.api.service.IControllerService;
-import org.apache.hyracks.control.nc.io.IOManager;
public class TestNCApplicationContext implements INCApplicationContext {
private final ILifeCycleComponentManager lccm;
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestUtils.java
index 039cf7d..ab87f93 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/test/support/TestUtils.java
@@ -23,7 +23,6 @@
import java.util.List;
import java.util.concurrent.Executors;
-import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.api.application.INCApplicationContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.ActivityId;
@@ -54,8 +53,4 @@
devices.add(new IODeviceHandle(new File(System.getProperty("java.io.tmpdir")), "."));
return new IOManager(devices, Executors.newCachedThreadPool());
}
-
- public static String joinPath(String... pathElements) {
- return StringUtils.join(pathElements, File.separatorChar);
- }
}
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java
index 31bce7a..a9a529ca 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java
@@ -66,7 +66,7 @@
throw new AssertionError("This util class should not be initialized.");
}
- public static int getSizeInBytes(final int size, final StorageUnit unit) {
+ public static int getIntSizeInBytes(final int size, final StorageUnit unit) {
double result = unit.toBytes(size);
if (result > Integer.MAX_VALUE || result < Integer.MIN_VALUE) {
throw new IllegalArgumentException("The given value:" + result + " is not within the integer range.");
@@ -75,7 +75,7 @@
}
}
- public static long getSizeInBytes(final long size, final StorageUnit unit) {
+ public static long getLongSizeInBytes(final long size, final StorageUnit unit) {
double result = unit.toBytes(size);
if (result > Long.MAX_VALUE || result < Long.MIN_VALUE) {
throw new IllegalArgumentException("The given value:" + result + " is not within the long range.");
@@ -85,8 +85,7 @@
}
/**
- * Helper method to parse a byte unit string to its double value and unit
- * (e.g., 10,345.8MB becomes Pair<10345.8, StorageUnit.MB>.)
+ * Helper method to parse a byte unit string to its double value in bytes
*
* @throws IllegalArgumentException
*/
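
The getSizeInBytes overloads are renamed so the int and long variants are distinguished at the call site rather than by argument type alone. A hedged usage sketch follows; the MEGABYTE and GIGABYTE constants are assumed from the existing StorageUnit enum in this class.

import org.apache.hyracks.util.StorageUtil;
import org.apache.hyracks.util.StorageUtil.StorageUnit;

public class StorageUtilSketch {
    public static void main(String[] args) {
        // int-range conversion; throws IllegalArgumentException if the result overflows an int
        int bufferBytes = StorageUtil.getIntSizeInBytes(32, StorageUnit.MEGABYTE);
        // long-range conversion for larger budgets
        long budgetBytes = StorageUtil.getLongSizeInBytes(512, StorageUnit.GIGABYTE);
        System.out.println(bufferBytes + " bytes / " + budgetBytes + " bytes");
    }
}
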
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/replication/replication.1.get.http b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/file/FileUtil.java
similarity index 68%
copy from asterixdb/asterix-app/src/test/resources/runtimets/queries/api/replication/replication.1.get.http
copy to hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/file/FileUtil.java
index 5976b5d..d44f1b4 100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/api/replication/replication.1.get.http
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/file/FileUtil.java
@@ -16,10 +16,17 @@
* specific language governing permissions and limitations
* under the License.
*/
-/*
- * Test case Name : replication
- * Description : Replication
- * Expected Result : Positive
- * Date : 28th October 2016
- */
-/admin/cluster/replication
+package org.apache.hyracks.util.file;
+
+import java.io.File;
+
+public class FileUtil {
+ private FileUtil() {
+ }
+
+ public static String joinPath(String... elements) {
+ return String.join(File.separator, elements)
+ .replaceAll("([^:])(" + File.separator + ")+", "$1$2")
+ .replaceAll(File.separator + "$", "");
+ }
+}
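
FileUtil.joinPath, added here as the shared replacement for the ad-hoc joinPath helpers removed from the test-support classes, joins segments with File.separator, collapses repeated separators between segments, and drops a trailing separator. A small usage sketch (outputs shown assume a Unix-style separator):

import org.apache.hyracks.util.file.FileUtil;

public class JoinPathSketch {
    public static void main(String[] args) {
        // "target" + File.separator + "data" -> "target/data" on Unix-like systems
        System.out.println(FileUtil.joinPath("target", "data"));
        // repeated separators between segments are collapsed and the trailing one is removed:
        // "target/" + "/" + "/data/" -> "target/data"
        System.out.println(FileUtil.joinPath("target/", "/data/"));
    }
}
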
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index cbb83aa..3bf472d 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -140,6 +140,11 @@
<version>3.5</version>
</dependency>
<dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-collections4</artifactId>
+ <version>4.1</version>
+ </dependency>
+ <dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.4.5</version>
@@ -154,6 +159,11 @@
<artifactId>apache-rat-plugin</artifactId>
<version>0.12</version>
</dependency>
+ <dependency>
+ <groupId>args4j</groupId>
+ <artifactId>args4j</artifactId>
+ <version>2.33</version>
+ </dependency>
</dependencies>
</dependencyManagement>
@@ -164,7 +174,7 @@
<artifactId>maven-jar-plugin</artifactId>
<version>3.0.0</version>
<configuration>
- <excludes>
+ <excludes combine.children="append">
<exclude>**/DEPENDENCIES</exclude>
</excludes>
</configuration>
@@ -176,6 +186,9 @@
<configuration>
<failOnWarning>true</failOnWarning>
<outputXML>true</outputXML>
+ <usedDependencies>
+ <usedDependency>org.slf4j:slf4j-simple</usedDependency>
+ </usedDependencies>
</configuration>
<executions>
<execution>
@@ -212,6 +225,10 @@
<licenseFamily implementation="org.apache.rat.license.Apache20LicenseFamily"/>
</licenseFamilies>
<excludeSubProjects>true</excludeSubProjects>
+ <excludes combine.children="append">
+ <!-- TODO (mblow): ClusterControllerService should not get written outside of **/target/; remove once fixed -->
+ <exclude>**/ClusterControllerService/**</exclude>
+ </excludes>
</configuration>
</plugin>
<plugin>
@@ -232,7 +249,7 @@
<includes>
<include>${global.test.includes},${test.includes}</include>
</includes>
- <excludes>
+ <excludes combine.children="append">
<exclude>${global.test.excludes},${test.excludes}</exclude>
</excludes>
</configuration>
@@ -507,4 +524,14 @@
<module>algebricks</module>
<module>hyracks-fullstack-license</module>
</modules>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-simple</artifactId>
+ <version>1.7.22</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
</project>