Remove static cc application context instance
Change-Id: Ia2e250405967ec880e7af6387aa981f39b3392c0
Reviewed-on: https://asterix-gerrit.ics.uci.edu/1606
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Michael Blow <mblow@apache.org>
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveJobNotificationHandler.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveJobNotificationHandler.java
index d7998f8..b4ed8e5 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveJobNotificationHandler.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveJobNotificationHandler.java
@@ -30,7 +30,6 @@
import org.apache.hyracks.api.job.JobSpecification;
public class ActiveJobNotificationHandler implements Runnable {
- public static final ActiveJobNotificationHandler INSTANCE = new ActiveJobNotificationHandler();
public static final String ACTIVE_ENTITY_PROPERTY_NAME = "ActiveJob";
private static final Logger LOGGER = Logger.getLogger(ActiveJobNotificationHandler.class.getName());
private static final boolean DEBUG = false;
@@ -38,7 +37,7 @@
private final Map<EntityId, IActiveEntityEventsListener> entityEventListeners;
private final Map<JobId, EntityId> jobId2ActiveJobInfos;
- private ActiveJobNotificationHandler() {
+ public ActiveJobNotificationHandler() {
this.eventInbox = new LinkedBlockingQueue<>();
this.jobId2ActiveJobInfos = new HashMap<>();
this.entityEventListeners = new HashMap<>();
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveLifecycleListener.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveLifecycleListener.java
index 6a10b0c..86c3e7d 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveLifecycleListener.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveLifecycleListener.java
@@ -34,20 +34,21 @@
public class ActiveLifecycleListener implements IJobLifecycleListener {
private static final Logger LOGGER = Logger.getLogger(ActiveLifecycleListener.class.getName());
- public static final ActiveLifecycleListener INSTANCE = new ActiveLifecycleListener();
+ private final ActiveJobNotificationHandler notificationHandler;
private final LinkedBlockingQueue<ActiveEvent> jobEventInbox;
private final ExecutorService executorService;
- private ActiveLifecycleListener() {
- jobEventInbox = ActiveJobNotificationHandler.INSTANCE.getEventInbox();
+ public ActiveLifecycleListener() {
+ notificationHandler = new ActiveJobNotificationHandler();
+ jobEventInbox = notificationHandler.getEventInbox();
executorService = Executors.newSingleThreadExecutor();
- executorService.execute(ActiveJobNotificationHandler.INSTANCE);
+ executorService.execute(notificationHandler);
}
@Override
public synchronized void notifyJobStart(JobId jobId) throws HyracksException {
- EntityId entityId = ActiveJobNotificationHandler.INSTANCE.getEntity(jobId);
+ EntityId entityId = notificationHandler.getEntity(jobId);
if (entityId != null) {
jobEventInbox.add(new ActiveEvent(jobId, Kind.JOB_STARTED, entityId));
}
@@ -55,7 +56,7 @@
@Override
public synchronized void notifyJobFinish(JobId jobId) throws HyracksException {
- EntityId entityId = ActiveJobNotificationHandler.INSTANCE.getEntity(jobId);
+ EntityId entityId = notificationHandler.getEntity(jobId);
if (entityId != null) {
jobEventInbox.add(new ActiveEvent(jobId, Kind.JOB_FINISHED, entityId));
} else {
@@ -67,7 +68,7 @@
@Override
public void notifyJobCreation(JobId jobId, JobSpecification spec) throws HyracksException {
- ActiveJobNotificationHandler.INSTANCE.notifyJobCreation(jobId, spec);
+ notificationHandler.notifyJobCreation(jobId, spec);
}
public void receive(ActivePartitionMessage message) {
@@ -78,4 +79,8 @@
public void stop() {
executorService.shutdown();
}
+
+ public ActiveJobNotificationHandler getNotificationHandler() {
+ return notificationHandler;
+ }
}
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java
index ac3caf3..98a6979 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveSourceOperatorNodePushable.java
@@ -22,7 +22,7 @@
import java.util.logging.Logger;
import org.apache.asterix.active.message.ActivePartitionMessage;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -40,7 +40,7 @@
public ActiveSourceOperatorNodePushable(IHyracksTaskContext ctx, ActiveRuntimeId runtimeId) {
this.ctx = ctx;
- activeManager = (ActiveManager) ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext()
+ activeManager = (ActiveManager) ((INcApplicationContext) ctx.getJobletContext().getServiceContext()
.getApplicationContext()).getActiveManager();
this.runtimeId = runtimeId;
}
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java
index da3ac1c..231ec25 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActiveManagerMessage.java
@@ -21,12 +21,11 @@
import java.io.Serializable;
import org.apache.asterix.active.ActiveManager;
-import org.apache.asterix.common.api.IAppRuntimeContext;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-public class ActiveManagerMessage implements IApplicationMessage {
+public class ActiveManagerMessage implements INcAddressedMessage {
public static final byte STOP_ACTIVITY = 0x00;
private static final long serialVersionUID = 1L;
@@ -53,9 +52,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
- ((ActiveManager) appContext.getActiveManager()).submit(this);
+ public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ ((ActiveManager) appCtx.getActiveManager()).submit(this);
}
@Override
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java
index 3c7aa06..335121a 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/message/ActivePartitionMessage.java
@@ -22,12 +22,12 @@
import org.apache.asterix.active.ActiveLifecycleListener;
import org.apache.asterix.active.ActiveRuntimeId;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.service.IControllerService;
-public class ActivePartitionMessage implements IApplicationMessage {
+public class ActivePartitionMessage implements ICcAddressedMessage {
public static final byte ACTIVE_RUNTIME_REGISTERED = 0x00;
public static final byte ACTIVE_RUNTIME_DEREGISTERED = 0x01;
@@ -66,8 +66,9 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- ActiveLifecycleListener.INSTANCE.receive(this);
+ public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+ activeListener.receive(this);
}
@Override
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
index 5eae36b..3d7ba34 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/InvertedIndexPOperator.java
@@ -22,7 +22,7 @@
import java.util.List;
import java.util.Map;
-import org.apache.asterix.common.dataflow.IApplicationContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.metadata.MetadataException;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.declared.DataSourceId;
@@ -234,7 +234,7 @@
invertedIndexFieldsForNonBulkLoadOps[k] = k;
}
}
- IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+ ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint =
metadataProvider.getSplitProviderAndConstraints(dataset, indexName);
// TODO: Here we assume there is only one search key field.
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/DefaultRuleSetFactory.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/DefaultRuleSetFactory.java
index 32d766b..2db5d9a 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/DefaultRuleSetFactory.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/DefaultRuleSetFactory.java
@@ -21,6 +21,7 @@
import java.util.ArrayList;
import java.util.List;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.optimizer.base.RuleCollections;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
@@ -32,17 +33,19 @@
public class DefaultRuleSetFactory implements IRuleSetFactory {
@Override
- public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getLogicalRewrites()
- throws AlgebricksException {
- return buildLogical();
+ public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getLogicalRewrites(
+ ICcApplicationContext appCtx) throws AlgebricksException {
+ return buildLogical(appCtx);
}
@Override
- public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getPhysicalRewrites() {
- return buildPhysical();
+ public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getPhysicalRewrites(
+ ICcApplicationContext appCtx) {
+ return buildPhysical(appCtx);
}
- public static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildLogical() {
+ public static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildLogical(
+ ICcApplicationContext appCtx) {
List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultLogicalRewrites = new ArrayList<>();
SequentialFixpointRuleController seqCtrlNoDfs = new SequentialFixpointRuleController(false);
SequentialFixpointRuleController seqCtrlFullDfs = new SequentialFixpointRuleController(true);
@@ -51,17 +54,18 @@
defaultLogicalRewrites.add(new Pair<>(seqOnceCtrl, RuleCollections.buildTypeInferenceRuleCollection()));
defaultLogicalRewrites.add(new Pair<>(seqOnceCtrl, RuleCollections.buildAutogenerateIDRuleCollection()));
defaultLogicalRewrites
- .add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildNormalizationRuleCollection()));
+ .add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildNormalizationRuleCollection(appCtx)));
defaultLogicalRewrites
.add(new Pair<>(seqCtrlNoDfs, RuleCollections.buildCondPushDownAndJoinInferenceRuleCollection()));
- defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildLoadFieldsRuleCollection()));
+ defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildLoadFieldsRuleCollection(appCtx)));
// fj
defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildFuzzyJoinRuleCollection()));
//
- defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildNormalizationRuleCollection()));
+ defaultLogicalRewrites
+ .add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildNormalizationRuleCollection(appCtx)));
defaultLogicalRewrites
.add(new Pair<>(seqCtrlNoDfs, RuleCollections.buildCondPushDownAndJoinInferenceRuleCollection()));
- defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildLoadFieldsRuleCollection()));
+ defaultLogicalRewrites.add(new Pair<>(seqCtrlFullDfs, RuleCollections.buildLoadFieldsRuleCollection(appCtx)));
defaultLogicalRewrites.add(new Pair<>(seqOnceCtrl, RuleCollections.buildDataExchangeRuleCollection()));
defaultLogicalRewrites.add(new Pair<>(seqCtrlNoDfs, RuleCollections.buildConsolidationRuleCollection()));
defaultLogicalRewrites.add(new Pair<>(seqOnceCtrl, RuleCollections.buildFulltextContainsRuleCollection()));
@@ -72,14 +76,15 @@
return defaultLogicalRewrites;
}
- public static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildPhysical() {
+ public static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildPhysical(
+ ICcApplicationContext appCtx) {
List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> defaultPhysicalRewrites = new ArrayList<>();
SequentialOnceRuleController seqOnceCtrl = new SequentialOnceRuleController(true);
SequentialOnceRuleController seqOnceTopLevel = new SequentialOnceRuleController(false);
defaultPhysicalRewrites
.add(new Pair<>(seqOnceCtrl, RuleCollections.buildPhysicalRewritesAllLevelsRuleCollection()));
defaultPhysicalRewrites
- .add(new Pair<>(seqOnceTopLevel, RuleCollections.buildPhysicalRewritesTopLevelRuleCollection()));
+ .add(new Pair<>(seqOnceTopLevel, RuleCollections.buildPhysicalRewritesTopLevelRuleCollection(appCtx)));
defaultPhysicalRewrites.add(new Pair<>(seqOnceCtrl, RuleCollections.prepareForJobGenRuleCollection()));
return defaultPhysicalRewrites;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java
index af07355..dabac3d 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java
@@ -20,6 +20,7 @@
import java.util.List;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.rewriter.base.AbstractRuleController;
@@ -31,13 +32,15 @@
* @return the logical rewrites
* @throws AlgebricksException
*/
- public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getLogicalRewrites()
+ public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getLogicalRewrites(
+ ICcApplicationContext appCtx)
throws AlgebricksException;
/**
* @return the physical rewrites
*/
- public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getPhysicalRewrites()
+ public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getPhysicalRewrites(
+ ICcApplicationContext appCtx)
throws AlgebricksException;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/jobgen/QueryLogicalExpressionJobGen.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/jobgen/QueryLogicalExpressionJobGen.java
index 54bb0bd..9f88bb6 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/jobgen/QueryLogicalExpressionJobGen.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/jobgen/QueryLogicalExpressionJobGen.java
@@ -20,6 +20,7 @@
import java.util.List;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.functions.FunctionDescriptorTag;
import org.apache.asterix.external.library.ExternalFunctionDescriptorProvider;
import org.apache.asterix.formats.base.IDataFormat;
@@ -136,8 +137,9 @@
IDataFormat format = FormatUtils.getDefaultFormat();
fd = format.resolveFunction(expr, env);
} else {
+ ICcApplicationContext appCtx = (ICcApplicationContext) context.getAppContext();
fd = ExternalFunctionDescriptorProvider
- .getExternalFunctionDescriptor((IExternalFunctionInfo) expr.getFunctionInfo());
+ .getExternalFunctionDescriptor((IExternalFunctionInfo) expr.getFunctionInfo(), appCtx);
}
return fd.createEvaluatorFactory(args);
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java
index ec73185..d94a045 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/RuleCollections.java
@@ -23,6 +23,7 @@
import java.util.LinkedList;
import java.util.List;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.optimizer.rules.AddEquivalenceClassForRecordConstructorRule;
import org.apache.asterix.optimizer.rules.AsterixExtractFunctionsFromJoinConditionRule;
import org.apache.asterix.optimizer.rules.AsterixInlineVariablesRule;
@@ -161,7 +162,7 @@
return Collections.singletonList(new FullTextContainsParameterCheckRule());
}
- public static final List<IAlgebraicRewriteRule> buildNormalizationRuleCollection() {
+ public static final List<IAlgebraicRewriteRule> buildNormalizationRuleCollection(ICcApplicationContext appCtx) {
List<IAlgebraicRewriteRule> normalization = new LinkedList<>();
normalization.add(new ResolveVariableRule());
normalization.add(new CheckInsertUpsertReturningRule());
@@ -186,7 +187,7 @@
// so that PushAggFunc can happen in fewer places.
normalization.add(new PushAggFuncIntoStandaloneAggregateRule());
normalization.add(new ListifyUnnestingFunctionRule());
- normalization.add(new ConstantFoldingRule());
+ normalization.add(new ConstantFoldingRule(appCtx));
normalization.add(new RemoveRedundantSelectRule());
normalization.add(new UnnestToDataScanRule());
normalization.add(new MetaFunctionToMetaVariableRule());
@@ -236,7 +237,7 @@
return condPushDownAndJoinInference;
}
- public static final List<IAlgebraicRewriteRule> buildLoadFieldsRuleCollection() {
+ public static final List<IAlgebraicRewriteRule> buildLoadFieldsRuleCollection(ICcApplicationContext appCtx) {
List<IAlgebraicRewriteRule> fieldLoads = new LinkedList<>();
fieldLoads.add(new LoadRecordFieldsRule());
fieldLoads.add(new PushFieldAccessRule());
@@ -245,7 +246,7 @@
fieldLoads.add(new RemoveRedundantVariablesRule());
fieldLoads.add(new AsterixInlineVariablesRule());
fieldLoads.add(new RemoveUnusedAssignAndAggregateRule());
- fieldLoads.add(new ConstantFoldingRule());
+ fieldLoads.add(new ConstantFoldingRule(appCtx));
fieldLoads.add(new RemoveRedundantSelectRule());
fieldLoads.add(new FeedScanCollectionToUnnest());
fieldLoads.add(new NestedSubplanToJoinRule());
@@ -340,13 +341,14 @@
return physicalRewritesAllLevels;
}
- public static final List<IAlgebraicRewriteRule> buildPhysicalRewritesTopLevelRuleCollection() {
+ public static final List<IAlgebraicRewriteRule> buildPhysicalRewritesTopLevelRuleCollection(
+ ICcApplicationContext appCtx) {
List<IAlgebraicRewriteRule> physicalRewritesTopLevel = new LinkedList<>();
physicalRewritesTopLevel.add(new PushNestedOrderByUnderPreSortedGroupByRule());
physicalRewritesTopLevel.add(new CopyLimitDownRule());
// CopyLimitDownRule may generates non-topmost limits with numeric_adds functions.
// We are going to apply a constant folding rule again for this case.
- physicalRewritesTopLevel.add(new ConstantFoldingRule());
+ physicalRewritesTopLevel.add(new ConstantFoldingRule(appCtx));
physicalRewritesTopLevel.add(new PushLimitIntoOrderByRule());
physicalRewritesTopLevel.add(new IntroduceProjectsRule());
physicalRewritesTopLevel.add(new SetAlgebricksPhysicalOperatorsRule());
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
index 98e9f9d..5ef41c4 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ConstantFoldingRule.java
@@ -24,6 +24,7 @@
import java.util.List;
import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.dataflow.data.common.ExpressionTypeComputer;
import org.apache.asterix.dataflow.data.nontagged.MissingWriterFactory;
import org.apache.asterix.formats.nontagged.ADMPrinterFactoryProvider;
@@ -55,8 +56,8 @@
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractLogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import org.apache.hyracks.algebricks.core.algebra.expressions.ExpressionRuntimeProvider;
+import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.StatefulFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
@@ -80,16 +81,16 @@
public class ConstantFoldingRule implements IAlgebraicRewriteRule {
private final ConstantFoldingVisitor cfv = new ConstantFoldingVisitor();
+ private final JobGenContext jobGenCtx;
// Function Identifier sets that the ConstantFolding rule should skip to apply.
// Most of them are record-related functions.
private static final ImmutableSet<FunctionIdentifier> FUNC_ID_SET_THAT_SHOULD_NOT_BE_APPLIED =
- ImmutableSet.of(BuiltinFunctions.RECORD_MERGE, BuiltinFunctions.ADD_FIELDS,
- BuiltinFunctions.REMOVE_FIELDS, BuiltinFunctions.GET_RECORD_FIELDS,
- BuiltinFunctions.GET_RECORD_FIELD_VALUE, BuiltinFunctions.FIELD_ACCESS_NESTED,
- BuiltinFunctions.GET_ITEM, BuiltinFunctions.OPEN_RECORD_CONSTRUCTOR,
- BuiltinFunctions.FIELD_ACCESS_BY_INDEX, BuiltinFunctions.CAST_TYPE,
- BuiltinFunctions.META, BuiltinFunctions.META_KEY);
+ ImmutableSet.of(BuiltinFunctions.RECORD_MERGE, BuiltinFunctions.ADD_FIELDS, BuiltinFunctions.REMOVE_FIELDS,
+ BuiltinFunctions.GET_RECORD_FIELDS, BuiltinFunctions.GET_RECORD_FIELD_VALUE,
+ BuiltinFunctions.FIELD_ACCESS_NESTED, BuiltinFunctions.GET_ITEM,
+ BuiltinFunctions.OPEN_RECORD_CONSTRUCTOR, BuiltinFunctions.FIELD_ACCESS_BY_INDEX,
+ BuiltinFunctions.CAST_TYPE, BuiltinFunctions.META, BuiltinFunctions.META_KEY);
/** Throws exceptions in substituiteProducedVariable, setVarType, and one getVarType method. */
private static final IVariableTypeEnvironment _emptyTypeEnv = new IVariableTypeEnvironment() {
@@ -121,16 +122,17 @@
}
};
- private static final JobGenContext _jobGenCtx = new JobGenContext(null, null, null,
- SerializerDeserializerProvider.INSTANCE, BinaryHashFunctionFactoryProvider.INSTANCE,
- BinaryHashFunctionFamilyProvider.INSTANCE, BinaryComparatorFactoryProvider.INSTANCE,
- TypeTraitProvider.INSTANCE, BinaryBooleanInspector.FACTORY, BinaryIntegerInspector.FACTORY,
- ADMPrinterFactoryProvider.INSTANCE, MissingWriterFactory.INSTANCE, null,
- new ExpressionRuntimeProvider(QueryLogicalExpressionJobGen.INSTANCE),
- ExpressionTypeComputer.INSTANCE, null, null, null, null, GlobalConfig.DEFAULT_FRAME_SIZE, null);
-
private static final IOperatorSchema[] _emptySchemas = new IOperatorSchema[] {};
+ public ConstantFoldingRule(ICcApplicationContext appCtx) {
+ jobGenCtx = new JobGenContext(null, null, appCtx, SerializerDeserializerProvider.INSTANCE,
+ BinaryHashFunctionFactoryProvider.INSTANCE, BinaryHashFunctionFamilyProvider.INSTANCE,
+ BinaryComparatorFactoryProvider.INSTANCE, TypeTraitProvider.INSTANCE, BinaryBooleanInspector.FACTORY,
+ BinaryIntegerInspector.FACTORY, ADMPrinterFactoryProvider.INSTANCE, MissingWriterFactory.INSTANCE, null,
+ new ExpressionRuntimeProvider(QueryLogicalExpressionJobGen.INSTANCE), ExpressionTypeComputer.INSTANCE,
+ null, null, null, null, GlobalConfig.DEFAULT_FRAME_SIZE, null);
+ }
+
@Override
public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
throws AlgebricksException {
@@ -168,13 +170,13 @@
@Override
public Pair<Boolean, ILogicalExpression> visitConstantExpression(ConstantExpression expr, Void arg)
throws AlgebricksException {
- return new Pair<Boolean, ILogicalExpression>(false, expr);
+ return new Pair<>(false, expr);
}
@Override
public Pair<Boolean, ILogicalExpression> visitVariableReferenceExpression(VariableReferenceExpression expr,
Void arg) throws AlgebricksException {
- return new Pair<Boolean, ILogicalExpression>(false, expr);
+ return new Pair<>(false, expr);
}
@Override
@@ -182,12 +184,12 @@
Void arg) throws AlgebricksException {
boolean changed = changeRec(expr, arg);
if (!checkArgs(expr) || !expr.isFunctional()) {
- return new Pair<Boolean, ILogicalExpression>(changed, expr);
+ return new Pair<>(changed, expr);
}
// Skip Constant Folding for the record-related functions.
if (FUNC_ID_SET_THAT_SHOULD_NOT_BE_APPLIED.contains(expr.getFunctionIdentifier())) {
- return new Pair<Boolean, ILogicalExpression>(false, null);
+ return new Pair<>(false, null);
}
//Current List SerDe assumes a strongly typed list, so we do not constant fold the list constructors if they are not strongly typed
@@ -199,7 +201,7 @@
//case1: listType == null, could be a nested list inside a list<ANY>
//case2: itemType = ANY
//case3: itemType = a nested list
- return new Pair<Boolean, ILogicalExpression>(false, null);
+ return new Pair<>(false, null);
}
}
if (expr.getFunctionIdentifier().equals(BuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
@@ -208,20 +210,20 @@
int k = rt.getFieldIndex(str);
if (k >= 0) {
// wait for the ByNameToByIndex rule to apply
- return new Pair<Boolean, ILogicalExpression>(changed, expr);
+ return new Pair<>(changed, expr);
}
}
- IScalarEvaluatorFactory fact = _jobGenCtx.getExpressionRuntimeProvider().createEvaluatorFactory(expr,
- _emptyTypeEnv, _emptySchemas, _jobGenCtx);
+ IScalarEvaluatorFactory fact = jobGenCtx.getExpressionRuntimeProvider().createEvaluatorFactory(expr,
+ _emptyTypeEnv, _emptySchemas, jobGenCtx);
try {
IScalarEvaluator eval = fact.createScalarEvaluator(null);
eval.evaluate(null, p);
Object t = _emptyTypeEnv.getType(expr);
@SuppressWarnings("rawtypes")
- ISerializerDeserializer serde = _jobGenCtx.getSerializerDeserializerProvider()
- .getSerializerDeserializer(t);
+ ISerializerDeserializer serde =
+ jobGenCtx.getSerializerDeserializerProvider().getSerializerDeserializer(t);
bbis.setByteBuffer(ByteBuffer.wrap(p.getByteArray(), p.getStartOffset(), p.getLength()), 0);
IAObject o = (IAObject) serde.deserialize(dis);
return new Pair<>(true, new ConstantExpression(new AsterixConstantValue(o)));
@@ -234,21 +236,21 @@
public Pair<Boolean, ILogicalExpression> visitAggregateFunctionCallExpression(
AggregateFunctionCallExpression expr, Void arg) throws AlgebricksException {
boolean changed = changeRec(expr, arg);
- return new Pair<Boolean, ILogicalExpression>(changed, expr);
+ return new Pair<>(changed, expr);
}
@Override
public Pair<Boolean, ILogicalExpression> visitStatefulFunctionCallExpression(
StatefulFunctionCallExpression expr, Void arg) throws AlgebricksException {
boolean changed = changeRec(expr, arg);
- return new Pair<Boolean, ILogicalExpression>(changed, expr);
+ return new Pair<>(changed, expr);
}
@Override
public Pair<Boolean, ILogicalExpression> visitUnnestingFunctionCallExpression(
UnnestingFunctionCallExpression expr, Void arg) throws AlgebricksException {
boolean changed = changeRec(expr, arg);
- return new Pair<Boolean, ILogicalExpression>(changed, expr);
+ return new Pair<>(changed, expr);
}
private boolean changeRec(AbstractFunctionCallExpression expr, Void arg) throws AlgebricksException {
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
index 3cdeb6f..e2589da 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
@@ -25,6 +25,7 @@
import java.util.logging.Logger;
import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.statement.DatasetDecl;
@@ -36,7 +37,6 @@
import org.apache.asterix.metadata.dataset.hints.DatasetHints;
import org.apache.asterix.metadata.entities.Dataverse;
import org.apache.asterix.metadata.utils.MetadataConstants;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -49,11 +49,12 @@
private static final Logger LOGGER = Logger.getLogger(AbstractLangTranslator.class.getName());
- public void validateOperation(Dataverse defaultDataverse, Statement stmt) throws AsterixException {
+ public void validateOperation(ICcApplicationContext appCtx, Dataverse defaultDataverse, Statement stmt)
+ throws AsterixException {
if (!(ClusterStateManager.INSTANCE.getState().equals(ClusterState.ACTIVE)
&& ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted())) {
- int maxWaitCycles = AppContextInfo.INSTANCE.getExternalProperties().getMaxWaitClusterActive();
+ int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
try {
ClusterStateManager.INSTANCE.waitForState(ClusterState.ACTIVE, maxWaitCycles, TimeUnit.SECONDS);
} catch (HyracksDataException e) {
@@ -80,7 +81,7 @@
}
if (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted()) {
- int maxWaitCycles = AppContextInfo.INSTANCE.getExternalProperties().getMaxWaitClusterActive();
+ int maxWaitCycles = appCtx.getExternalProperties().getMaxWaitClusterActive();
int waitCycleCount = 0;
try {
while (!ClusterStateManager.INSTANCE.isGlobalRecoveryCompleted() && waitCycleCount < maxWaitCycles) {
@@ -162,7 +163,7 @@
Pair<Boolean, String> validationResult = null;
StringBuffer errorMsgBuffer = new StringBuffer();
for (Entry<String, String> hint : hints.entrySet()) {
- validationResult = DatasetHints.validate(hint.getKey(), hint.getValue());
+ validationResult = DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
if (!validationResult.first) {
errorMsgBuffer.append("Dataset: " + datasetStmt.getName().getValue()
+ " error in processing hint: " + hint.getKey() + " " + validationResult.second);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutorFactory.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutorFactory.java
index cf54921..23365de 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutorFactory.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/IStatementExecutorFactory.java
@@ -21,6 +21,7 @@
import java.util.List;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.common.base.Statement;
@@ -44,6 +45,6 @@
* provides storage related components
* @return an implementation of {@code IStatementExecutor} thaxt is used to execute the passed list of statements
*/
- IStatementExecutor create(List<Statement> statements, SessionConfig conf,
+ IStatementExecutor create(ICcApplicationContext appCtx, List<Statement> statements, SessionConfig conf,
ILangCompilationProvider compilationProvider, IStorageComponentProvider storageComponentProvider);
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index c4cc486..4b4ef8e 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -34,6 +34,7 @@
import org.apache.asterix.algebra.operators.CommitOperator;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.MetadataProperties;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.functions.FunctionConstants;
import org.apache.asterix.common.functions.FunctionSignature;
@@ -94,7 +95,6 @@
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.runtime.formats.FormatUtils;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.translator.CompiledStatements.CompiledInsertStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
import org.apache.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement;
@@ -316,7 +316,7 @@
if (outputDatasetName == null) {
FileSplit outputFileSplit = metadataProvider.getOutputFile();
if (outputFileSplit == null) {
- outputFileSplit = getDefaultOutputFileLocation();
+ outputFileSplit = getDefaultOutputFileLocation(metadataProvider.getApplicationContext());
}
metadataProvider.setOutputFile(outputFileSplit);
@@ -606,8 +606,8 @@
}
}
// A change feed, we don't need the assign to access PKs
- upsertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef, varRefsForLoading,
- metaExpSingletonList, InsertDeleteUpsertOperator.Kind.UPSERT, false);
+ upsertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef, varRefsForLoading, metaExpSingletonList,
+ InsertDeleteUpsertOperator.Kind.UPSERT, false);
// Create and add a new variable used for representing the original record
upsertOp.setPrevRecordVar(context.newVar());
upsertOp.setPrevRecordType(targetDatasource.getItemType());
@@ -730,11 +730,11 @@
dataset.getDatasetDetails(), domain);
}
- private FileSplit getDefaultOutputFileLocation() throws MetadataException {
+ private FileSplit getDefaultOutputFileLocation(ICcApplicationContext appCtx) throws MetadataException {
String outputDir = System.getProperty("java.io.tmpDir");
String filePath =
outputDir + System.getProperty("file.separator") + OUTPUT_FILE_PREFIX + outputFileID.incrementAndGet();
- MetadataProperties metadataProperties = AppContextInfo.INSTANCE.getMetadataProperties();
+ MetadataProperties metadataProperties = appCtx.getMetadataProperties();
return new ManagedFileSplit(metadataProperties.getMetadataNodeName(), filePath);
}
@@ -1005,8 +1005,8 @@
arguments.add(new MutableObject<>(ConstantExpression.TRUE));
arguments.add(new MutableObject<>(new VariableReferenceExpression(opAndVarForThen.second)));
arguments.add(new MutableObject<>(new VariableReferenceExpression(opAndVarForElse.second)));
- AbstractFunctionCallExpression swithCaseExpr = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.SWITCH_CASE), arguments);
+ AbstractFunctionCallExpression swithCaseExpr =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.SWITCH_CASE), arguments);
AssignOperator assignOp = new AssignOperator(selectVar, new MutableObject<>(swithCaseExpr));
assignOp.getInputs().add(new MutableObject<>(opAndVarForElse.first));
@@ -1176,8 +1176,7 @@
firstOp.getInputs().add(topOp);
topOp = lastOp;
- Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 =
- langExprToAlgExpression(qe.getSatisfiesExpr(), topOp);
+ Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = langExprToAlgExpression(qe.getSatisfiesExpr(), topOp);
AggregateFunctionCallExpression fAgg;
SelectOperator s;
@@ -1219,8 +1218,7 @@
Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = langExprToAlgExpression(fb.getLeftExpr(), topOp);
f.getArguments().add(new MutableObject<>(eo1.first));
topOp = eo1.second;
- Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 =
- langExprToAlgExpression(fb.getRightExpr(), topOp);
+ Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = langExprToAlgExpression(fb.getRightExpr(), topOp);
f.getArguments().add(new MutableObject<>(eo2.first));
topOp = eo2.second;
}
@@ -1699,9 +1697,8 @@
Mutable<ILogicalExpression> expr = new MutableObject<>(
new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.IS_UNKOWN),
new ArrayList<>(Collections.singletonList(new MutableObject<>(logicalExpr)))));
- arguments.add(new MutableObject<>(
- new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.NOT),
- new ArrayList<>(Collections.singletonList(expr)))));
+ arguments.add(new MutableObject<>(new ScalarFunctionCallExpression(
+ FunctionUtil.getFunctionInfo(BuiltinFunctions.NOT), new ArrayList<>(Collections.singletonList(expr)))));
return new MutableObject<>(
new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.AND), arguments));
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
index f0e8588..b12935d 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
@@ -63,7 +63,6 @@
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.optimizer.base.FuzzyUtils;
import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.transaction.management.service.transaction.JobIdFactory;
import org.apache.asterix.translator.CompiledStatements.ICompiledDmlStatement;
import org.apache.asterix.translator.IStatementExecutor.Stats;
@@ -113,13 +112,13 @@
private static final int MIN_FRAME_LIMIT_FOR_JOIN = 5;
// A white list of supported configurable parameters.
- private static final Set<String> CONFIGURABLE_PARAMETER_NAMES = ImmutableSet.of(
- CompilerProperties.COMPILER_JOINMEMORY_KEY, CompilerProperties.COMPILER_GROUPMEMORY_KEY,
- CompilerProperties.COMPILER_SORTMEMORY_KEY, CompilerProperties.COMPILER_PARALLELISM_KEY,
- FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, FuzzyUtils.SIM_FUNCTION_PROP_NAME,
- FuzzyUtils.SIM_THRESHOLD_PROP_NAME, SubscribeFeedStatement.WAIT_FOR_COMPLETION,
- FeedActivityDetails.FEED_POLICY_NAME, FeedActivityDetails.COLLECT_LOCATIONS, "inline_with", "hash_merge",
- "output-record-type");
+ private static final Set<String> CONFIGURABLE_PARAMETER_NAMES =
+ ImmutableSet.of(CompilerProperties.COMPILER_JOINMEMORY_KEY, CompilerProperties.COMPILER_GROUPMEMORY_KEY,
+ CompilerProperties.COMPILER_SORTMEMORY_KEY, CompilerProperties.COMPILER_PARALLELISM_KEY,
+ FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, FuzzyUtils.SIM_FUNCTION_PROP_NAME,
+ FuzzyUtils.SIM_THRESHOLD_PROP_NAME, SubscribeFeedStatement.WAIT_FOR_COMPLETION,
+ FeedActivityDetails.FEED_POLICY_NAME, FeedActivityDetails.COLLECT_LOCATIONS, "inline_with",
+ "hash_merge", "output-record-type");
private final IRewriterFactory rewriterFactory;
private final IAstPrintVisitorFactory astPrintVisitorFactory;
@@ -145,8 +144,7 @@
IExpressionEvalSizeComputer expressionEvalSizeComputer,
IMergeAggregationExpressionFactory mergeAggregationExpressionFactory,
IExpressionTypeComputer expressionTypeComputer, IMissableTypeComputer missableTypeComputer,
- IConflictingTypeResolver conflictingTypeResolver,
- PhysicalOptimizationConfig physicalOptimizationConfig,
+ IConflictingTypeResolver conflictingTypeResolver, PhysicalOptimizationConfig physicalOptimizationConfig,
AlgebricksPartitionConstraint clusterLocations) {
return new AlgebricksOptimizationContext(varCounter, expressionEvalSizeComputer,
mergeAggregationExpressionFactory, expressionTypeComputer, missableTypeComputer,
@@ -222,7 +220,7 @@
}
printPlanPostfix(conf);
}
- CompilerProperties compilerProperties = AppContextInfo.INSTANCE.getCompilerProperties();
+ CompilerProperties compilerProperties = metadataProvider.getApplicationContext().getCompilerProperties();
int frameSize = compilerProperties.getFrameSize();
Map<String, String> querySpecificConfig = metadataProvider.getConfig();
validateConfig(querySpecificConfig); // Validates the user-overridden query parameters.
@@ -243,8 +241,8 @@
HeuristicCompilerFactoryBuilder builder =
new HeuristicCompilerFactoryBuilder(OptimizationContextFactory.INSTANCE);
builder.setPhysicalOptimizationConfig(OptimizationConfUtil.getPhysicalOptimizationConfig());
- builder.setLogicalRewrites(ruleSetFactory.getLogicalRewrites());
- builder.setPhysicalRewrites(ruleSetFactory.getPhysicalRewrites());
+ builder.setLogicalRewrites(ruleSetFactory.getLogicalRewrites(metadataProvider.getApplicationContext()));
+ builder.setPhysicalRewrites(ruleSetFactory.getPhysicalRewrites(metadataProvider.getApplicationContext()));
IDataFormat format = metadataProvider.getFormat();
ICompilerFactory compilerFactory = builder.create();
builder.setExpressionEvalSizeComputer(format.getExpressionEvalSizeComputer());
@@ -282,7 +280,8 @@
try {
LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();
PlanPrettyPrinter.printPlan(plan, pvisitor, 0);
- ResultUtil.printResults(pvisitor.get().toString(), conf, new Stats(), null);
+ ResultUtil.printResults(metadataProvider.getApplicationContext(), pvisitor.get().toString(), conf,
+ new Stats(), null);
return null;
} catch (IOException e) {
throw new AlgebricksException(e);
@@ -296,8 +295,7 @@
builder.setBinaryBooleanInspectorFactory(format.getBinaryBooleanInspectorFactory());
builder.setBinaryIntegerInspectorFactory(format.getBinaryIntegerInspectorFactory());
builder.setComparatorFactoryProvider(format.getBinaryComparatorFactoryProvider());
- builder.setExpressionRuntimeProvider(
- new ExpressionRuntimeProvider(QueryLogicalExpressionJobGen.INSTANCE));
+ builder.setExpressionRuntimeProvider(new ExpressionRuntimeProvider(QueryLogicalExpressionJobGen.INSTANCE));
builder.setHashFunctionFactoryProvider(format.getBinaryHashFunctionFactoryProvider());
builder.setHashFunctionFamilyProvider(format.getBinaryHashFunctionFamilyProvider());
builder.setMissingWriterFactory(format.getMissingWriterFactory());
@@ -327,7 +325,7 @@
JobEventListenerFactory jobEventListenerFactory =
new JobEventListenerFactory(asterixJobId, metadataProvider.isWriteTransaction());
- JobSpecification spec = compiler.createJob(AppContextInfo.INSTANCE, jobEventListenerFactory);
+ JobSpecification spec = compiler.createJob(metadataProvider.getApplicationContext(), jobEventListenerFactory);
// When the top-level statement is a query, the statement parameter is null.
if (statement == null) {
@@ -465,8 +463,7 @@
int minFrameLimit)
throws AlgebricksException {
IOptionType<Long> longBytePropertyInterpreter = OptionTypes.LONG_BYTE_UNIT;
- long memBudget =
- parameter == null ? memBudgetInConfiguration : longBytePropertyInterpreter.parse(parameter);
+ long memBudget = parameter == null ? memBudgetInConfiguration : longBytePropertyInterpreter.parse(parameter);
int frameLimit = (int) (memBudget / frameSize);
if (frameLimit < minFrameLimit) {
throw AsterixException.create(ErrorCode.COMPILATION_BAD_QUERY_PARAMETER_VALUE, parameterName,
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AppRuntimeContextProviderForRecovery.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AppRuntimeContextProviderForRecovery.java
index d8a2fab..6a3472d 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AppRuntimeContextProviderForRecovery.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AppRuntimeContextProviderForRecovery.java
@@ -20,7 +20,7 @@
import org.apache.asterix.app.nc.NCAppRuntimeContext;
import org.apache.asterix.common.api.ThreadExecutor;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.transactions.IAppRuntimeContextProvider;
import org.apache.asterix.common.transactions.ITransactionSubsystem;
@@ -85,7 +85,7 @@
}
@Override
- public IAppRuntimeContext getAppContext() {
+ public INcApplicationContext getAppContext() {
return asterixAppRuntimeContext;
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java
index f79a2da..a9c4b39 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/AbstractQueryApiServlet.java
@@ -26,6 +26,7 @@
import java.util.concurrent.ConcurrentMap;
import org.apache.asterix.app.result.ResultReader;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.hyracks.api.client.IHyracksClientConnection;
@@ -34,6 +35,7 @@
import org.apache.hyracks.http.server.AbstractServlet;
public class AbstractQueryApiServlet extends AbstractServlet {
+ protected final ICcApplicationContext appCtx;
public enum ResultFields {
REQUEST_ID("requestID"),
@@ -91,8 +93,9 @@
}
}
- AbstractQueryApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
+ AbstractQueryApiServlet(ICcApplicationContext appCtx, ConcurrentMap<String, Object> ctx, String[] paths) {
super(ctx, paths);
+ this.appCtx = appCtx;
}
protected IHyracksDataset getHyracksDataset() throws Exception { // NOSONAR
@@ -101,8 +104,8 @@
if (hds == null) {
hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
if (hds == null) {
- hds = new HyracksDataset(getHyracksClientConnection(), ResultReader.FRAME_SIZE,
- ResultReader.NUM_READERS);
+ hds = new HyracksDataset(getHyracksClientConnection(),
+ appCtx.getCompilerProperties().getFrameSize(), ResultReader.NUM_READERS);
ctx.put(HYRACKS_DATASET_ATTR, hds);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
index e435da7..3384332 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
@@ -38,6 +38,7 @@
import org.apache.asterix.app.result.ResultReader;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.aql.parser.TokenMgrError;
@@ -67,15 +68,17 @@
private static final Logger LOGGER = Logger.getLogger(ApiServlet.class.getName());
public static final String HTML_STATEMENT_SEPARATOR = "<!-- BEGIN -->";
+ private final ICcApplicationContext appCtx;
private final ILangCompilationProvider aqlCompilationProvider;
private final ILangCompilationProvider sqlppCompilationProvider;
private final IStatementExecutorFactory statementExectorFactory;
private final IStorageComponentProvider componentProvider;
- public ApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+ public ApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
ILangCompilationProvider aqlCompilationProvider, ILangCompilationProvider sqlppCompilationProvider,
IStatementExecutorFactory statementExecutorFactory, IStorageComponentProvider componentProvider) {
super(ctx, paths);
+ this.appCtx = appCtx;
this.aqlCompilationProvider = aqlCompilationProvider;
this.sqlppCompilationProvider = sqlppCompilationProvider;
this.statementExectorFactory = statementExecutorFactory;
@@ -126,7 +129,8 @@
synchronized (ctx) {
hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
if (hds == null) {
- hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
+ hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(),
+ ResultReader.NUM_READERS);
ctx.put(HYRACKS_DATASET_ATTR, hds);
}
}
@@ -140,7 +144,7 @@
sessionConfig.setOOBData(isSet(printExprParam), isSet(printRewrittenExprParam),
isSet(printLogicalPlanParam), isSet(printOptimizedLogicalPlanParam), isSet(printJob));
MetadataManager.INSTANCE.init();
- IStatementExecutor translator = statementExectorFactory.create(aqlStatements, sessionConfig,
+ IStatementExecutor translator = statementExectorFactory.create(appCtx, aqlStatements, sessionConfig,
compilationProvider, componentProvider);
double duration;
long startTime = System.currentTimeMillis();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
index 984eef2..82e8f7a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ClusterApiServlet.java
@@ -28,7 +28,7 @@
import java.util.logging.Logger;
import java.util.regex.Pattern;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.config.Section;
@@ -99,8 +99,8 @@
protected ObjectNode getClusterStateJSON(IServletRequest request, String pathToNode) {
ObjectNode json = ClusterStateManager.INSTANCE.getClusterStateDescription();
- AppContextInfo appConfig = (AppContextInfo) ctx.get(ASTERIX_APP_CONTEXT_INFO_ATTR);
- json.putPOJO("config", ConfigUtils.getSectionOptionsForJSON(appConfig.getCCServiceContext().getAppConfig(),
+ CcApplicationContext appConfig = (CcApplicationContext) ctx.get(ASTERIX_APP_CONTEXT_INFO_ATTR);
+ json.putPOJO("config", ConfigUtils.getSectionOptionsForJSON(appConfig.getServiceContext().getAppConfig(),
Section.COMMON, getConfigSelector()));
ArrayNode ncs = (ArrayNode) json.get("ncs");
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
index 869289a..cb3c063 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ConnectorApiServlet.java
@@ -28,6 +28,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.file.StorageComponentProvider;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -58,9 +59,11 @@
*/
public class ConnectorApiServlet extends AbstractServlet {
private static final Logger LOGGER = Logger.getLogger(ConnectorApiServlet.class.getName());
+ private ICcApplicationContext appCtx;
- public ConnectorApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
+ public ConnectorApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx) {
super(ctx, paths);
+ this.appCtx = appCtx;
}
@Override
@@ -91,7 +94,7 @@
MetadataManager.INSTANCE.init();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
// Retrieves file splits of the dataset.
- MetadataProvider metadataProvider = new MetadataProvider(null, new StorageComponentProvider());
+ MetadataProvider metadataProvider = new MetadataProvider(appCtx, null, new StorageComponentProvider());
try {
metadataProvider.setMetadataTxnContext(mdTxnCtx);
Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DdlApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DdlApiServlet.java
index a994470..4c1dbf7 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DdlApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DdlApiServlet.java
@@ -21,6 +21,7 @@
import java.util.concurrent.ConcurrentMap;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.translator.IStatementExecutorFactory;
@@ -30,10 +31,10 @@
private static final byte ALLOWED_CATEGORIES =
Statement.Category.QUERY | Statement.Category.UPDATE | Statement.Category.DDL;
- public DdlApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+ public DdlApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
IStorageComponentProvider componentProvider) {
- super(ctx, paths, compilationProvider, statementExecutorFactory, componentProvider);
+ super(ctx, paths, appCtx, compilationProvider, statementExecutorFactory, componentProvider);
}
@Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
index 788927f..dcd0e70 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
@@ -35,7 +35,7 @@
import java.util.logging.Logger;
import org.apache.asterix.api.http.servlet.ServletConstants;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
@@ -49,9 +49,11 @@
public class DiagnosticsApiServlet extends NodeControllerDetailsApiServlet {
private static final Logger LOGGER = Logger.getLogger(DiagnosticsApiServlet.class.getName());
+ private final ICcApplicationContext appCtx;
- public DiagnosticsApiServlet(ConcurrentMap<String, Object> ctx, String... paths) {
+ public DiagnosticsApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx) {
super(ctx, paths);
+ this.appCtx = appCtx;
}
@Override
@@ -93,10 +95,9 @@
executor.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getNodeDetailsJSON(null, true, false)))));
Map<String, Map<String, Future<ObjectNode>>> ncDataMap = new HashMap<>();
- for (String nc : AppContextInfo.INSTANCE.getMetadataProperties().getNodeNames()) {
+ for (String nc : appCtx.getMetadataProperties().getNodeNames()) {
Map<String, Future<ObjectNode>> ncData = new HashMap<>();
- ncData.put("threaddump",
- executor.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getThreadDump(nc)))));
+ ncData.put("threaddump", executor.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getThreadDump(nc)))));
ncData.put("config", executor
.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getNodeDetailsJSON(nc, false, true)))));
ncData.put("stats", executor.submit(() -> fixupKeys(processNodeStats(hcc, nc))));
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/FullApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/FullApiServlet.java
index eafae35..0a461c7 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/FullApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/FullApiServlet.java
@@ -21,6 +21,7 @@
import java.util.concurrent.ConcurrentMap;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.translator.IStatementExecutorFactory;
@@ -32,10 +33,10 @@
private static final byte ALLOWED_CATEGORIES = Statement.Category.QUERY | Statement.Category.UPDATE
| Statement.Category.DDL | Statement.Category.PROCEDURE;
- public FullApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+ public FullApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
IStorageComponentProvider componentProvider) {
- super(ctx, paths, compilationProvider, statementExecutorFactory, componentProvider);
+ super(ctx, paths, appCtx, compilationProvider, statementExecutorFactory, componentProvider);
}
@Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryApiServlet.java
index 5075795..3e692d3 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryApiServlet.java
@@ -21,6 +21,7 @@
import java.util.concurrent.ConcurrentMap;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.translator.IStatementExecutorFactory;
@@ -29,10 +30,10 @@
public class QueryApiServlet extends RestApiServlet {
private static final byte ALLOWED_CATEGORIES = Statement.Category.QUERY;
- public QueryApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+ public QueryApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
IStorageComponentProvider componentProvider) {
- super(ctx, paths, compilationProvider, statementExecutorFactory, componentProvider);
+ super(ctx, paths, appCtx, compilationProvider, statementExecutorFactory, componentProvider);
}
@Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java
index 57492be..401f55e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryResultApiServlet.java
@@ -25,6 +25,7 @@
import org.apache.asterix.app.result.ResultHandle;
import org.apache.asterix.app.result.ResultReader;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.translator.IStatementExecutor.Stats;
import org.apache.asterix.translator.SessionConfig;
import org.apache.hyracks.api.dataset.DatasetJobRecord;
@@ -40,8 +41,8 @@
public class QueryResultApiServlet extends AbstractQueryApiServlet {
private static final Logger LOGGER = Logger.getLogger(QueryResultApiServlet.class.getName());
- public QueryResultApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
- super(ctx, paths);
+ public QueryResultApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx) {
+ super(appCtx, ctx, paths);
}
@Override
@@ -94,7 +95,7 @@
// originally determined there. Need to save this value on
// some object that we can obtain here.
SessionConfig sessionConfig = RestApiServlet.initResponse(request, response);
- ResultUtil.printResults(resultReader, sessionConfig, new Stats(), null);
+ ResultUtil.printResults(appCtx, resultReader, sessionConfig, new Stats(), null);
} catch (HyracksDataException e) {
final int errorCode = e.getErrorCode();
if (ErrorCode.NO_RESULTSET == errorCode) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
index faf9968..20bffc4 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
@@ -18,8 +18,6 @@
*/
package org.apache.asterix.api.http.server;
-import static org.apache.asterix.translator.IStatementExecutor.ResultDelivery;
-
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
@@ -34,6 +32,7 @@
import org.apache.asterix.common.api.IClusterManagementWork;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.aql.parser.TokenMgrError;
@@ -42,6 +41,7 @@
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.asterix.translator.IStatementExecutor;
+import org.apache.asterix.translator.IStatementExecutor.ResultDelivery;
import org.apache.asterix.translator.IStatementExecutor.Stats;
import org.apache.asterix.translator.IStatementExecutorContext;
import org.apache.asterix.translator.IStatementExecutorFactory;
@@ -71,10 +71,10 @@
private final IStorageComponentProvider componentProvider;
private final IStatementExecutorContext queryCtx = new StatementExecutorContext();
- public QueryServiceServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+ public QueryServiceServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
IStorageComponentProvider componentProvider) {
- super(ctx, paths);
+ super(appCtx, ctx, paths);
this.compilationProvider = compilationProvider;
this.statementExecutorFactory = statementExecutorFactory;
this.componentProvider = componentProvider;
@@ -179,7 +179,6 @@
String clientContextID;
String mode;
-
@Override
public String toString() {
try {
@@ -236,8 +235,8 @@
return SessionConfig.OutputFormat.CLEAN_JSON;
}
- private static SessionConfig createSessionConfig(RequestParameters param, String handleUrl, PrintWriter
- resultWriter) {
+ private static SessionConfig createSessionConfig(RequestParameters param, String handleUrl,
+ PrintWriter resultWriter) {
SessionConfig.ResultDecorator resultPrefix = new SessionConfig.ResultDecorator() {
int resultNo = -1;
@@ -386,9 +385,12 @@
* delivery mode. Usually there will be a "status" endpoint for ASYNC requests that exposes the status of the
* execution and a "result" endpoint for DEFERRED requests that will deliver the result for a successful execution.
*
- * @param host hostname used for this request
- * @param path servlet path for this request
- * @param delivery ResultDelivery mode for this request
+ * @param host
+ * hostname used for this request
+ * @param path
+ * servlet path for this request
+ * @param delivery
+ * ResultDelivery mode for this request
* @return a handle (URL) that allows a client to access further information for this request
*/
protected String getHandleUrl(String host, String path, ResultDelivery delivery) {
@@ -430,7 +432,8 @@
List<Statement> statements = parser.parse();
MetadataManager.INSTANCE.init();
IStatementExecutor translator =
- statementExecutorFactory.create(statements, sessionConfig, compilationProvider, componentProvider);
+ statementExecutorFactory.create(appCtx, statements, sessionConfig, compilationProvider,
+ componentProvider);
execStart = System.nanoTime();
translator.compileAndExecute(getHyracksClientConnection(), getHyracksDataset(), delivery, stats,
param.clientContextID, queryCtx);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java
index 8fbb4c5..d0c574e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryStatusApiServlet.java
@@ -29,6 +29,7 @@
import org.apache.asterix.app.result.ResultHandle;
import org.apache.asterix.app.result.ResultReader;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.api.dataset.DatasetJobRecord;
import org.apache.hyracks.api.dataset.IHyracksDataset;
import org.apache.hyracks.http.api.IServletRequest;
@@ -40,8 +41,8 @@
public class QueryStatusApiServlet extends AbstractQueryApiServlet {
private static final Logger LOGGER = Logger.getLogger(QueryStatusApiServlet.class.getName());
- public QueryStatusApiServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
- super(ctx, paths);
+ public QueryStatusApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx) {
+ super(appCtx, ctx, paths);
}
@Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java
index 94ce017..c798a7c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryWebInterfaceServlet.java
@@ -25,7 +25,7 @@
import java.util.logging.Logger;
import org.apache.asterix.common.config.ExternalProperties;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.StaticResourceServlet;
@@ -38,9 +38,11 @@
public class QueryWebInterfaceServlet extends StaticResourceServlet {
private static final Logger LOGGER = Logger.getLogger(QueryWebInterfaceServlet.class.getName());
+ private ICcApplicationContext appCtx;
- public QueryWebInterfaceServlet(ConcurrentMap<String, Object> ctx, String[] paths) {
+ public QueryWebInterfaceServlet(ICcApplicationContext appCtx, ConcurrentMap<String, Object> ctx, String[] paths) {
super(ctx, paths);
+ this.appCtx = appCtx;
}
@Override
@@ -57,7 +59,7 @@
@Override
protected void post(IServletRequest request, IServletResponse response) throws IOException {
HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
- ExternalProperties externalProperties = AppContextInfo.INSTANCE.getExternalProperties();
+ ExternalProperties externalProperties = appCtx.getExternalProperties();
response.setStatus(HttpResponseStatus.OK);
ObjectMapper om = new ObjectMapper();
ObjectNode obj = om.createObjectNode();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
index 74290f3..e339ba9 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
@@ -32,6 +32,7 @@
import org.apache.asterix.app.translator.QueryTranslator;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.aql.parser.TokenMgrError;
@@ -61,15 +62,17 @@
public abstract class RestApiServlet extends AbstractServlet {
private static final Logger LOGGER = Logger.getLogger(RestApiServlet.class.getName());
+ private final ICcApplicationContext appCtx;
private final ILangCompilationProvider compilationProvider;
private final IParserFactory parserFactory;
private final IStatementExecutorFactory statementExecutorFactory;
private final IStorageComponentProvider componentProvider;
- public RestApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+ public RestApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
IStorageComponentProvider componentProvider) {
super(ctx, paths);
+ this.appCtx = appCtx;
this.compilationProvider = compilationProvider;
this.parserFactory = compilationProvider.getParserFactory();
this.statementExecutorFactory = statementExecutorFactory;
@@ -188,7 +191,8 @@
synchronized (ctx) {
hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
if (hds == null) {
- hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
+ hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(),
+ ResultReader.NUM_READERS);
ctx.put(HYRACKS_DATASET_ATTR, hds);
}
}
@@ -197,7 +201,7 @@
List<Statement> aqlStatements = parser.parse();
validate(aqlStatements);
MetadataManager.INSTANCE.init();
- IStatementExecutor translator = statementExecutorFactory.create(aqlStatements, sessionConfig,
+ IStatementExecutor translator = statementExecutorFactory.create(appCtx, aqlStatements, sessionConfig,
compilationProvider, componentProvider);
translator.compileAndExecute(hcc, hds, resultDelivery, new IStatementExecutor.Stats());
} catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
index a257958..fe6fa89 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
@@ -36,6 +36,7 @@
import org.apache.asterix.app.result.ResultHandle;
import org.apache.asterix.app.result.ResultPrinter;
import org.apache.asterix.app.result.ResultReader;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.translator.IStatementExecutor.Stats;
import org.apache.asterix.translator.SessionConfig;
@@ -76,14 +77,14 @@
return escaped;
}
- public static void printResults(ResultReader resultReader, SessionConfig conf, Stats stats,
- ARecordType recordType) throws HyracksDataException {
- new ResultPrinter(conf, stats, recordType).print(resultReader);
+ public static void printResults(ICcApplicationContext appCtx, ResultReader resultReader, SessionConfig conf,
+ Stats stats, ARecordType recordType) throws HyracksDataException {
+ new ResultPrinter(appCtx, conf, stats, recordType).print(resultReader);
}
- public static void printResults(String record, SessionConfig conf, Stats stats, ARecordType recordType)
- throws HyracksDataException {
- new ResultPrinter(conf, stats, recordType).print(record);
+ public static void printResults(ICcApplicationContext appCtx, String record, SessionConfig conf, Stats stats,
+ ARecordType recordType) throws HyracksDataException {
+ new ResultPrinter(appCtx, conf, stats, recordType).print(record);
}
public static void printResultHandle(SessionConfig conf, ResultHandle handle) throws HyracksDataException {
@@ -123,9 +124,8 @@
pw.print("\": [{ \n");
printField(pw, QueryServiceServlet.ErrorField.CODE.str(), "1");
final String msg = rootCause.getMessage();
- printField(pw, QueryServiceServlet.ErrorField.MSG.str(), JSONUtil
- .escape(msg != null ? msg : rootCause.getClass().getSimpleName()),
- addStack);
+ printField(pw, QueryServiceServlet.ErrorField.MSG.str(),
+ JSONUtil.escape(msg != null ? msg : rootCause.getClass().getSimpleName()), addStack);
pw.print(comma ? "\t}],\n" : "\t}]\n");
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UpdateApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UpdateApiServlet.java
index ad2c128..3650189 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UpdateApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/UpdateApiServlet.java
@@ -21,6 +21,7 @@
import java.util.concurrent.ConcurrentMap;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.translator.IStatementExecutorFactory;
@@ -29,10 +30,10 @@
public class UpdateApiServlet extends RestApiServlet {
private static final byte ALLOWED_CATEGORIES = Statement.Category.QUERY | Statement.Category.UPDATE;
- public UpdateApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
+ public UpdateApiServlet(ConcurrentMap<String, Object> ctx, String[] paths, ICcApplicationContext appCtx,
ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
IStorageComponentProvider componentProvider) {
- super(ctx, paths, compilationProvider, statementExecutorFactory, componentProvider);
+ super(ctx, paths, appCtx, compilationProvider, statementExecutorFactory, componentProvider);
}
@Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
index a4cea39..5acba381 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/VersionApiServlet.java
@@ -27,15 +27,17 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import io.netty.handler.codec.http.HttpResponseStatus;
-import org.apache.asterix.common.config.IPropertiesProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+import io.netty.handler.codec.http.HttpResponseStatus;
+
public class VersionApiServlet extends AbstractServlet {
private static final Logger LOGGER = Logger.getLogger(VersionApiServlet.class.getName());
@@ -46,7 +48,7 @@
@Override
protected void get(IServletRequest request, IServletResponse response) {
response.setStatus(HttpResponseStatus.OK);
- IPropertiesProvider props = (IPropertiesProvider) ctx.get(ASTERIX_APP_CONTEXT_INFO_ATTR);
+ ICcApplicationContext props = (ICcApplicationContext) ctx.get(ASTERIX_APP_CONTEXT_INFO_ATTR);
Map<String, String> buildProperties = props.getBuildProperties().getAllProps();
ObjectMapper om = new ObjectMapper();
ObjectNode responseObject = om.createObjectNode();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
index d03e574..ecf2c53 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
@@ -25,6 +25,7 @@
import org.apache.asterix.api.common.APIFramework;
import org.apache.asterix.app.translator.QueryTranslator;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.utils.Job;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.common.base.IParser;
@@ -51,10 +52,12 @@
private final APIFramework apiFramework;
private final IStatementExecutorFactory statementExecutorFactory;
private final IStorageComponentProvider storageComponentProvider;
+ private ICcApplicationContext appCtx;
- public AsterixJavaClient(IHyracksClientConnection hcc, Reader queryText, PrintWriter writer,
- ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
- IStorageComponentProvider storageComponentProvider) {
+ public AsterixJavaClient(ICcApplicationContext appCtx, IHyracksClientConnection hcc, Reader queryText,
+ PrintWriter writer, ILangCompilationProvider compilationProvider,
+ IStatementExecutorFactory statementExecutorFactory, IStorageComponentProvider storageComponentProvider) {
+ this.appCtx = appCtx;
this.hcc = hcc;
this.queryText = queryText;
this.writer = writer;
@@ -65,10 +68,10 @@
parserFactory = compilationProvider.getParserFactory();
}
- public AsterixJavaClient(IHyracksClientConnection hcc, Reader queryText,
+ public AsterixJavaClient(ICcApplicationContext appCtx, IHyracksClientConnection hcc, Reader queryText,
ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
IStorageComponentProvider storageComponentProvider) {
- this(hcc, queryText,
+ this(appCtx, hcc, queryText,
// This is a commandline client and so System.out is appropriate
new PrintWriter(System.out, true), // NOSONAR
compilationProvider, statementExecutorFactory, storageComponentProvider);
@@ -102,8 +105,8 @@
conf.set(SessionConfig.FORMAT_ONLY_PHYSICAL_OPS, true);
}
- IStatementExecutor translator =
- statementExecutorFactory.create(statements, conf, compilationProvider, storageComponentProvider);
+ IStatementExecutor translator = statementExecutorFactory.create(appCtx, statements, conf, compilationProvider,
+ storageComponentProvider);
translator.compileAndExecute(hcc, null, QueryTranslator.ResultDelivery.IMMEDIATE,
new IStatementExecutor.Stats());
writer.flush();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/cc/ResourceIdManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/cc/ResourceIdManager.java
index f43092b..372404c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/cc/ResourceIdManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/cc/ResourceIdManager.java
@@ -36,7 +36,7 @@
if (!allReported) {
synchronized (this) {
if (!allReported) {
- if (reportedNodes.size() < ClusterStateManager.getNumberOfNodes()) {
+ if (reportedNodes.size() < ClusterStateManager.INSTANCE.getNumberOfNodes()) {
return -1;
} else {
reportedNodes = null;
@@ -58,7 +58,7 @@
if (!allReported) {
globalResourceId.set(Math.max(maxResourceId, globalResourceId.get()));
reportedNodes.add(nodeId);
- if (reportedNodes.size() == ClusterStateManager.getNumberOfNodes()) {
+ if (reportedNodes.size() == ClusterStateManager.INSTANCE.getNumberOfNodes()) {
reportedNodes = null;
allReported = true;
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
index a0b4a2a..555f571 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
@@ -29,8 +29,8 @@
import org.apache.asterix.active.ActiveManager;
import org.apache.asterix.api.common.AppRuntimeContextProviderForRecovery;
-import org.apache.asterix.common.api.IAppRuntimeContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.ThreadExecutor;
import org.apache.asterix.common.cluster.ClusterPartition;
import org.apache.asterix.common.config.ActiveProperties;
@@ -99,7 +99,7 @@
import org.apache.hyracks.storage.common.file.ILocalResourceRepositoryFactory;
import org.apache.hyracks.storage.common.file.IResourceIdFactory;
-public class NCAppRuntimeContext implements IAppRuntimeContext {
+public class NCAppRuntimeContext implements INcApplicationContext {
private static final Logger LOGGER = Logger.getLogger(NCAppRuntimeContext.class.getName());
private ILSMMergePolicyFactory metadataMergePolicyFactory;
@@ -165,28 +165,28 @@
@Override
public void initialize(boolean initialRun) throws IOException, ACIDException {
- ioManager = ncServiceContext.getIoManager();
- threadExecutor = new ThreadExecutor(ncServiceContext.getThreadFactory());
+ ioManager = getServiceContext().getIoManager();
+ threadExecutor = new ThreadExecutor(getServiceContext().getThreadFactory());
fileMapManager = new FileMapManager(ioManager);
ICacheMemoryAllocator allocator = new HeapBufferAllocator();
IPageCleanerPolicy pcp = new DelayPageCleanerPolicy(600000);
IPageReplacementStrategy prs = new ClockPageReplacementStrategy(allocator,
storageProperties.getBufferCachePageSize(), storageProperties.getBufferCacheNumPages());
- AsynchronousScheduler.INSTANCE.init(ncServiceContext.getThreadFactory());
+ AsynchronousScheduler.INSTANCE.init(getServiceContext().getThreadFactory());
lsmIOScheduler = AsynchronousScheduler.INSTANCE;
metadataMergePolicyFactory = new PrefixMergePolicyFactory();
ILocalResourceRepositoryFactory persistentLocalResourceRepositoryFactory =
- new PersistentLocalResourceRepositoryFactory(ioManager, ncServiceContext.getNodeId(),
+ new PersistentLocalResourceRepositoryFactory(ioManager, getServiceContext().getNodeId(),
metadataProperties);
localResourceRepository =
(PersistentLocalResourceRepository) persistentLocalResourceRepositoryFactory.createRepository();
IAppRuntimeContextProvider asterixAppRuntimeContextProvider = new AppRuntimeContextProviderForRecovery(this);
- txnSubsystem = new TransactionSubsystem(ncServiceContext, ncServiceContext.getNodeId(),
+ txnSubsystem = new TransactionSubsystem(getServiceContext(), getServiceContext().getNodeId(),
asterixAppRuntimeContextProvider, txnProperties);
IRecoveryManager recoveryMgr = txnSubsystem.getRecoveryManager();
@@ -202,11 +202,11 @@
isShuttingdown = false;
- activeManager = new ActiveManager(threadExecutor, ncServiceContext.getNodeId(),
+ activeManager = new ActiveManager(threadExecutor, getServiceContext().getNodeId(),
activeProperties.getMemoryComponentGlobalBudget(), compilerProperties.getFrameSize());
- if (replicationProperties.isParticipant(ncServiceContext.getNodeId())) {
- String nodeId = ncServiceContext.getNodeId();
+ if (replicationProperties.isParticipant(getServiceContext().getNodeId())) {
+ String nodeId = getServiceContext().getNodeId();
replicaResourcesManager = new ReplicaResourcesManager(localResourceRepository, metadataProperties);
@@ -235,24 +235,24 @@
//initialize replication channel
replicationChannel = new ReplicationChannel(nodeId, replicationProperties, txnSubsystem.getLogManager(),
- replicaResourcesManager, replicationManager, ncServiceContext,
+ replicaResourcesManager, replicationManager, getServiceContext(),
asterixAppRuntimeContextProvider);
remoteRecoveryManager = new RemoteRecoveryManager(replicationManager, this, replicationProperties);
bufferCache = new BufferCache(ioManager, prs, pcp, fileMapManager,
- storageProperties.getBufferCacheMaxOpenFiles(), ncServiceContext.getThreadFactory(),
+ storageProperties.getBufferCacheMaxOpenFiles(), getServiceContext().getThreadFactory(),
replicationManager);
} else {
bufferCache = new BufferCache(ioManager, prs, pcp, fileMapManager,
- storageProperties.getBufferCacheMaxOpenFiles(), ncServiceContext.getThreadFactory());
+ storageProperties.getBufferCacheMaxOpenFiles(), getServiceContext().getThreadFactory());
}
/*
* The order of registration is important. The buffer cache must registered before recovery and transaction
* managers. Notes: registered components are stopped in reversed order
*/
- ILifeCycleComponentManager lccm = ncServiceContext.getLifeCycleComponentManager();
+ ILifeCycleComponentManager lccm = getServiceContext().getLifeCycleComponentManager();
lccm.register((ILifeCycleComponent) bufferCache);
/*
* LogManager must be stopped after RecoveryManager, DatasetLifeCycleManager, and ReplicationManager
@@ -444,7 +444,7 @@
MetadataNode.INSTANCE.initialize(this, ncExtensionManager.getMetadataTupleTranslatorProvider(),
ncExtensionManager.getMetadataExtensions());
- proxy = (IAsterixStateProxy) ncServiceContext.getDistributedState();
+ proxy = (IAsterixStateProxy) getServiceContext().getDistributedState();
if (proxy == null) {
throw new IllegalStateException("Metadata node cannot access distributed state");
}
@@ -453,9 +453,9 @@
// This way we can delay the registration of the metadataNode until
// it is completely initialized.
MetadataManager.initialize(proxy, MetadataNode.INSTANCE);
- MetadataBootstrap.startUniverse(ncServiceContext, newUniverse);
+ MetadataBootstrap.startUniverse(getServiceContext(), newUniverse);
MetadataBootstrap.startDDLRecovery();
- ncExtensionManager.initializeMetadata(ncServiceContext);
+ ncExtensionManager.initializeMetadata(getServiceContext());
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Metadata node bound");
@@ -466,7 +466,7 @@
public void exportMetadataNodeStub() throws RemoteException {
IMetadataNode stub = (IMetadataNode) UnicastRemoteObject.exportObject(MetadataNode.INSTANCE,
getMetadataProperties().getMetadataPort());
- ((IAsterixStateProxy) ncServiceContext.getDistributedState()).setMetadataNode(stub);
+ ((IAsterixStateProxy) getServiceContext().getDistributedState()).setMetadataNode(stub);
}
@Override
@@ -482,4 +482,9 @@
public IStorageComponentProvider getStorageComponentProvider() {
return componentProvider;
}
+
+ @Override
+ public INCServiceContext getServiceContext() {
+ return ncServiceContext;
+ }
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/BindMetadataNodeTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/BindMetadataNodeTask.java
index 91f3524..424e66c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/BindMetadataNodeTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/BindMetadataNodeTask.java
@@ -18,7 +18,7 @@
*/
package org.apache.asterix.app.nc.task;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.service.IControllerService;
@@ -34,7 +34,7 @@
@Override
public void perform(IControllerService cs) throws HyracksDataException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+ INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
try {
if (exportStub) {
appContext.exportMetadataNodeStub();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/CheckpointTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/CheckpointTask.java
index e77346a..b7701d2 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/CheckpointTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/CheckpointTask.java
@@ -18,7 +18,7 @@
*/
package org.apache.asterix.app.nc.task;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.asterix.common.transactions.ICheckpointManager;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -30,7 +30,7 @@
@Override
public void perform(IControllerService cs) throws HyracksDataException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+ INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
ICheckpointManager checkpointMgr = appContext.getTransactionSubsystem().getCheckpointManager();
checkpointMgr.doSharpCheckpoint();
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/ExternalLibrarySetupTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/ExternalLibrarySetupTask.java
index ad9b28a..9506690 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/ExternalLibrarySetupTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/ExternalLibrarySetupTask.java
@@ -19,7 +19,7 @@
package org.apache.asterix.app.nc.task;
import org.apache.asterix.app.external.ExternalLibraryUtils;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.service.IControllerService;
@@ -35,7 +35,7 @@
@Override
public void perform(IControllerService cs) throws HyracksDataException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+ INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
try {
ExternalLibraryUtils.setUpExternaLibraries(appContext.getLibraryManager(), metadataNode);
} catch (Exception e) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/LocalRecoveryTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/LocalRecoveryTask.java
index 777097d..d52d15e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/LocalRecoveryTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/LocalRecoveryTask.java
@@ -21,7 +21,7 @@
import java.io.IOException;
import java.util.Set;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -38,7 +38,7 @@
@Override
public void perform(IControllerService cs) throws HyracksDataException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+ INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
try {
appContext.getTransactionSubsystem().getRecoveryManager().startLocalRecovery(partitions);
} catch (IOException | ACIDException e) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MetadataBootstrapTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MetadataBootstrapTask.java
index 65004b8..6415416 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MetadataBootstrapTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/MetadataBootstrapTask.java
@@ -18,7 +18,7 @@
*/
package org.apache.asterix.app.nc.task;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -30,7 +30,7 @@
@Override
public void perform(IControllerService cs) throws HyracksDataException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+ INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
try {
SystemState state = appContext.getTransactionSubsystem().getRecoveryManager().getSystemState();
appContext.initializeMetadata(state == SystemState.PERMANENT_DATA_LOSS);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/RemoteRecoveryTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/RemoteRecoveryTask.java
index 48479c5..f74a986 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/RemoteRecoveryTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/RemoteRecoveryTask.java
@@ -22,7 +22,7 @@
import java.util.Map;
import java.util.Set;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.service.IControllerService;
@@ -38,7 +38,7 @@
@Override
public void perform(IControllerService cs) throws HyracksDataException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+ INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
appContext.getRemoteRecoveryManager().doRemoteRecoveryPlan(recoveryPlan);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartFailbackTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartFailbackTask.java
index 9f04b10..8696d23 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartFailbackTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartFailbackTask.java
@@ -18,7 +18,7 @@
*/
package org.apache.asterix.app.nc.task;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.service.IControllerService;
@@ -29,7 +29,7 @@
@Override
public void perform(IControllerService cs) throws HyracksDataException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+ INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
appContext.getRemoteRecoveryManager().startFailbackProcess();
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java
index d1754dd..799581b 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartLifecycleComponentsTask.java
@@ -23,7 +23,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.asterix.common.config.MetadataProperties;
import org.apache.asterix.hyracks.bootstrap.AsterixStateDumpHandler;
@@ -40,7 +40,7 @@
@Override
public void perform(IControllerService cs) throws HyracksDataException {
- IAppRuntimeContext applicationContext = (IAppRuntimeContext) cs.getApplicationContext();
+ INcApplicationContext applicationContext = (INcApplicationContext) cs.getApplicationContext();
NCServiceContext serviceCtx = (NCServiceContext) cs.getContext();
MetadataProperties metadataProperties = applicationContext.getMetadataProperties();
if (LOGGER.isLoggable(Level.INFO)) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartReplicationServiceTask.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartReplicationServiceTask.java
index 93e5b50..60d5c29 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartReplicationServiceTask.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/task/StartReplicationServiceTask.java
@@ -18,7 +18,7 @@
*/
package org.apache.asterix.app.nc.task;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.asterix.common.replication.IReplicationManager;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -30,7 +30,7 @@
@Override
public void perform(IControllerService cs) throws HyracksDataException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+ INcApplicationContext appContext = (INcApplicationContext) cs.getApplicationContext();
try {
//Open replication channel
appContext.getReplicationChannel().start();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
index 45f96ac..db26c3a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/AutoFaultToleranceStrategy.java
@@ -55,6 +55,7 @@
import org.apache.asterix.common.cluster.ClusterPartition;
import org.apache.asterix.common.cluster.IClusterStateManager;
import org.apache.asterix.common.config.ReplicationProperties;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.common.messaging.api.ICCMessageBroker;
@@ -63,8 +64,8 @@
import org.apache.asterix.common.replication.IReplicationStrategy;
import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
import org.apache.asterix.metadata.MetadataManager;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.util.FaultToleranceUtil;
+import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
import org.apache.hyracks.api.config.IOption;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -83,6 +84,7 @@
private IClusterStateManager clusterManager;
private ICCMessageBroker messageBroker;
private IReplicationStrategy replicationStrategy;
+ private ICCServiceContext serviceCtx;
private Set<String> pendingStartupCompletionNodes = new HashSet<>();
@Override
@@ -135,8 +137,8 @@
private synchronized void requestPartitionsTakeover(String failedNodeId) {
//replica -> list of partitions to takeover
Map<String, List<Integer>> partitionRecoveryPlan = new HashMap<>();
- ReplicationProperties replicationProperties = AppContextInfo.INSTANCE.getReplicationProperties();
-
+ ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+ ReplicationProperties replicationProperties = appCtx.getReplicationProperties();
//collect the partitions of the failed NC
List<ClusterPartition> lostPartitions = getNodeAssignedPartitions(failedNodeId);
if (!lostPartitions.isEmpty()) {
@@ -204,7 +206,8 @@
planId2FailbackPlanMap.put(plan.getPlanId(), plan);
//get all partitions this node requires to resync
- ReplicationProperties replicationProperties = AppContextInfo.INSTANCE.getReplicationProperties();
+ ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+ ReplicationProperties replicationProperties = appCtx.getReplicationProperties();
Set<String> nodeReplicas = replicationProperties.getNodeReplicasIds(failingBackNodeId);
clusterManager.getClusterPartitons();
for (String replicaId : nodeReplicas) {
@@ -255,7 +258,8 @@
* if the returning node is the original metadata node,
* then metadata node will change after the failback completes
*/
- String originalMetadataNode = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataNodeName();
+ ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+ String originalMetadataNode = appCtx.getMetadataProperties().getMetadataNodeName();
if (originalMetadataNode.equals(failbackNode)) {
plan.setNodeToReleaseMetadataManager(currentMetadataNode);
currentMetadataNode = "";
@@ -399,7 +403,8 @@
private synchronized void requestMetadataNodeTakeover() {
//need a new node to takeover metadata node
- ClusterPartition metadataPartiton = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataPartition();
+ ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+ ClusterPartition metadataPartiton = appCtx.getMetadataProperties().getMetadataPartition();
//request the metadataPartition node to register itself as the metadata node
TakeoverMetadataNodeRequestMessage takeoverRequest = new TakeoverMetadataNodeRequestMessage();
try {
@@ -418,10 +423,11 @@
}
@Override
- public IFaultToleranceStrategy from(IReplicationStrategy replicationStrategy, ICCMessageBroker messageBroker) {
+ public IFaultToleranceStrategy from(ICCServiceContext serviceCtx, IReplicationStrategy replicationStrategy) {
AutoFaultToleranceStrategy ft = new AutoFaultToleranceStrategy();
- ft.messageBroker = messageBroker;
+ ft.messageBroker = (ICCMessageBroker) serviceCtx.getMessageBroker();
ft.replicationStrategy = replicationStrategy;
+ ft.serviceCtx = serviceCtx;
return ft;
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/FaultToleranceStrategyFactory.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/FaultToleranceStrategyFactory.java
index 8d382a1..4e8ecd9 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/FaultToleranceStrategyFactory.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/FaultToleranceStrategyFactory.java
@@ -21,15 +21,15 @@
import java.util.HashMap;
import java.util.Map;
-import org.apache.asterix.common.messaging.api.ICCMessageBroker;
import org.apache.asterix.common.replication.IFaultToleranceStrategy;
import org.apache.asterix.common.replication.IReplicationStrategy;
import org.apache.asterix.event.schema.cluster.Cluster;
+import org.apache.hyracks.api.application.ICCServiceContext;
public class FaultToleranceStrategyFactory {
- private static final Map<String, Class<? extends IFaultToleranceStrategy>>
- BUILT_IN_FAULT_TOLERANCE_STRATEGY = new HashMap<>();
+ private static final Map<String, Class<? extends IFaultToleranceStrategy>> BUILT_IN_FAULT_TOLERANCE_STRATEGY =
+ new HashMap<>();
static {
BUILT_IN_FAULT_TOLERANCE_STRATEGY.put("no_fault_tolerance", NoFaultToleranceStrategy.class);
@@ -42,14 +42,14 @@
}
public static IFaultToleranceStrategy create(Cluster cluster, IReplicationStrategy repStrategy,
- ICCMessageBroker messageBroker) {
- boolean highAvailabilityEnabled = cluster.getHighAvailability() != null
- && cluster.getHighAvailability().getEnabled() != null
- && Boolean.valueOf(cluster.getHighAvailability().getEnabled());
+ ICCServiceContext serviceCtx) {
+ boolean highAvailabilityEnabled =
+ cluster.getHighAvailability() != null && cluster.getHighAvailability().getEnabled() != null
+ && Boolean.valueOf(cluster.getHighAvailability().getEnabled());
if (!highAvailabilityEnabled || cluster.getHighAvailability().getFaultTolerance() == null
|| cluster.getHighAvailability().getFaultTolerance().getStrategy() == null) {
- return new NoFaultToleranceStrategy().from(repStrategy, messageBroker);
+ return new NoFaultToleranceStrategy().from(serviceCtx, repStrategy);
}
String strategyName = cluster.getHighAvailability().getFaultTolerance().getStrategy().toLowerCase();
if (!BUILT_IN_FAULT_TOLERANCE_STRATEGY.containsKey(strategyName)) {
@@ -58,7 +58,7 @@
}
Class<? extends IFaultToleranceStrategy> clazz = BUILT_IN_FAULT_TOLERANCE_STRATEGY.get(strategyName);
try {
- return clazz.newInstance().from(repStrategy, messageBroker);
+ return clazz.newInstance().from(serviceCtx, repStrategy);
} catch (InstantiationException | IllegalAccessException e) {
throw new IllegalStateException(e);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java
index c40e236..1b57403 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/MetadataNodeFaultToleranceStrategy.java
@@ -46,6 +46,7 @@
import org.apache.asterix.common.api.INCLifecycleTask;
import org.apache.asterix.common.cluster.ClusterPartition;
import org.apache.asterix.common.cluster.IClusterStateManager;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.common.messaging.api.ICCMessageBroker;
@@ -54,8 +55,8 @@
import org.apache.asterix.common.replication.IReplicationStrategy;
import org.apache.asterix.common.replication.Replica;
import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.util.FaultToleranceUtil;
+import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -66,6 +67,7 @@
private String metadataNodeId;
private IReplicationStrategy replicationStrategy;
private ICCMessageBroker messageBroker;
+ private ICCServiceContext serviceCtx;
private final Set<String> hotStandbyMetadataReplica = new HashSet<>();
private final Set<String> failedNodes = new HashSet<>();
private Set<String> pendingStartupCompletionNodes = new HashSet<>();
@@ -91,8 +93,8 @@
}
// If the failed node is the metadata node, ask its replicas to replay any committed jobs
if (nodeId.equals(metadataNodeId)) {
- int metadataPartitionId = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataPartition()
- .getPartitionId();
+ ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+ int metadataPartitionId = appCtx.getMetadataProperties().getMetadataPartition().getPartitionId();
Set<Integer> metadataPartition = new HashSet<>(Arrays.asList(metadataPartitionId));
Set<Replica> activeRemoteReplicas = replicationStrategy.getRemoteReplicas(metadataNodeId).stream()
.filter(replica -> !failedNodes.contains(replica.getId())).collect(Collectors.toSet());
@@ -110,10 +112,11 @@
}
@Override
- public IFaultToleranceStrategy from(IReplicationStrategy replicationStrategy, ICCMessageBroker messageBroker) {
+ public IFaultToleranceStrategy from(ICCServiceContext serviceCtx, IReplicationStrategy replicationStrategy) {
MetadataNodeFaultToleranceStrategy ft = new MetadataNodeFaultToleranceStrategy();
ft.replicationStrategy = replicationStrategy;
- ft.messageBroker = messageBroker;
+ ft.messageBroker = (ICCMessageBroker) serviceCtx.getMessageBroker();
+ ft.serviceCtx = serviceCtx;
return ft;
}
@@ -247,8 +250,8 @@
// Construct recovery plan: Node => Set of partitions to recover from it
Map<String, Set<Integer>> recoveryPlan = new HashMap<>();
// Recover metadata partition from any metadata hot standby replica
- int metadataPartitionId = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataPartition()
- .getPartitionId();
+ ICcApplicationContext appCtx = (ICcApplicationContext) serviceCtx.getApplicationContext();
+ int metadataPartitionId = appCtx.getMetadataProperties().getMetadataPartition().getPartitionId();
Set<Integer> metadataPartition = new HashSet<>(Arrays.asList(metadataPartitionId));
recoveryPlan.put(hotStandbyMetadataReplica.iterator().next(), metadataPartition);
return new RemoteRecoveryTask(recoveryPlan);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java
index b8b3c49..b9ea135 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NoFaultToleranceStrategy.java
@@ -47,6 +47,7 @@
import org.apache.asterix.common.replication.INCLifecycleMessage;
import org.apache.asterix.common.replication.IReplicationStrategy;
import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
+import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public class NoFaultToleranceStrategy implements IFaultToleranceStrategy {
@@ -54,8 +55,8 @@
private static final Logger LOGGER = Logger.getLogger(NoFaultToleranceStrategy.class.getName());
IClusterStateManager clusterManager;
private String metadataNodeId;
- private ICCMessageBroker messageBroker;
private Set<String> pendingStartupCompletionNodes = new HashSet<>();
+ private ICCMessageBroker messageBroker;
@Override
public void notifyNodeJoin(String nodeId) throws HyracksDataException {
@@ -87,9 +88,9 @@
}
@Override
- public IFaultToleranceStrategy from(IReplicationStrategy replicationStrategy, ICCMessageBroker messageBroker) {
+ public IFaultToleranceStrategy from(ICCServiceContext serviceCtx, IReplicationStrategy replicationStrategy) {
NoFaultToleranceStrategy ft = new NoFaultToleranceStrategy();
- ft.messageBroker = messageBroker;
+ ft.messageBroker = (ICCMessageBroker) serviceCtx.getMessageBroker();
return ft;
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java
index 0924838..feca7e8 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackRequestMessage.java
@@ -23,14 +23,14 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.asterix.common.replication.IRemoteRecoveryManager;
import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-public class CompleteFailbackRequestMessage extends AbstractFailbackPlanMessage {
+public class CompleteFailbackRequestMessage extends AbstractFailbackPlanMessage implements INcAddressedMessage {
private static final long serialVersionUID = 1L;
private static final Logger LOGGER = Logger.getLogger(CompleteFailbackRequestMessage.class.getName());
@@ -62,9 +62,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
- INCMessageBroker broker = (INCMessageBroker) cs.getContext().getMessageBroker();
+ public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
+ INCMessageBroker broker = (INCMessageBroker) appContext.getServiceContext().getMessageBroker();
HyracksDataException hde = null;
try {
IRemoteRecoveryManager remoteRecoeryManager = appContext.getRemoteRecoveryManager();
@@ -73,8 +72,8 @@
LOGGER.log(Level.SEVERE, "Failure during completion of failback process", e);
hde = HyracksDataException.create(e);
} finally {
- CompleteFailbackResponseMessage reponse = new CompleteFailbackResponseMessage(planId,
- requestId, partitions);
+ CompleteFailbackResponseMessage reponse =
+ new CompleteFailbackResponseMessage(planId, requestId, partitions);
try {
broker.sendMessageToCC(reponse);
} catch (Exception e) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackResponseMessage.java
index fb45892..0c5678f 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/CompleteFailbackResponseMessage.java
@@ -18,14 +18,15 @@
*/
package org.apache.asterix.app.replication.message;
-import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-
import java.util.Set;
-public class CompleteFailbackResponseMessage extends AbstractFailbackPlanMessage {
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
+import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class CompleteFailbackResponseMessage extends AbstractFailbackPlanMessage implements ICcAddressedMessage {
private static final long serialVersionUID = 1L;
private final Set<Integer> partitions;
@@ -49,8 +50,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+ public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
}
@Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java
index 3af075e..03c7ac6 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/NCLifecycleTaskReportMessage.java
@@ -18,12 +18,13 @@
*/
package org.apache.asterix.app.replication.message;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
import org.apache.asterix.common.replication.INCLifecycleMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-public class NCLifecycleTaskReportMessage implements INCLifecycleMessage {
+public class NCLifecycleTaskReportMessage implements INCLifecycleMessage, ICcAddressedMessage {
private static final long serialVersionUID = 1L;
private final String nodeId;
@@ -36,8 +37,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+ public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
}
public String getNodeId() {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java
index abfd6b2..cefcf49 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackRequestMessage.java
@@ -23,14 +23,15 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepository;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-public class PreparePartitionsFailbackRequestMessage extends AbstractFailbackPlanMessage {
+public class PreparePartitionsFailbackRequestMessage extends AbstractFailbackPlanMessage
+ implements INcAddressedMessage {
private static final long serialVersionUID = 1L;
private static final Logger LOGGER = Logger.getLogger(PreparePartitionsFailbackRequestMessage.class.getName());
@@ -71,9 +72,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
- INCMessageBroker broker = (INCMessageBroker) cs.getContext().getMessageBroker();
+ public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
+ INCMessageBroker broker = (INCMessageBroker) appContext.getServiceContext().getMessageBroker();
/**
* if the metadata partition will be failed back
* we need to flush and close all datasets including metadata datasets
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackResponseMessage.java
index e02cd42..bea1039 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/PreparePartitionsFailbackResponseMessage.java
@@ -18,14 +18,16 @@
*/
package org.apache.asterix.app.replication.message;
-import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-
import java.util.Set;
-public class PreparePartitionsFailbackResponseMessage extends AbstractFailbackPlanMessage {
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
+import org.apache.asterix.runtime.message.AbstractFailbackPlanMessage;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class PreparePartitionsFailbackResponseMessage extends AbstractFailbackPlanMessage
+ implements ICcAddressedMessage {
private static final long serialVersionUID = 1L;
private final Set<Integer> partitions;
@@ -40,8 +42,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+ public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
}
@Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java
index 5a73543..c8e2479 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsRequestMessage.java
@@ -22,14 +22,14 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.asterix.common.replication.INCLifecycleMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
import org.apache.hyracks.control.nc.NodeControllerService;
-public class ReplayPartitionLogsRequestMessage implements INCLifecycleMessage {
+public class ReplayPartitionLogsRequestMessage implements INCLifecycleMessage, INcAddressedMessage {
private static final Logger LOGGER = Logger.getLogger(ReplayPartitionLogsRequestMessage.class.getName());
private static final long serialVersionUID = 1L;
@@ -40,9 +40,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- NodeControllerService ncs = (NodeControllerService) cs;
- IAppRuntimeContext appContext = (IAppRuntimeContext) ncs.getApplicationContext();
+ public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
+ NodeControllerService ncs = (NodeControllerService) appContext.getServiceContext().getControllerService();
// Replay the logs for these partitions and flush any impacted dataset
appContext.getRemoteRecoveryManager().replayReplicaPartitionLogs(partitions, true);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsResponseMessage.java
index dc19735..e05fd47 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/ReplayPartitionLogsResponseMessage.java
@@ -18,14 +18,15 @@
*/
package org.apache.asterix.app.replication.message;
-import org.apache.asterix.common.replication.INCLifecycleMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-
import java.util.Set;
-public class ReplayPartitionLogsResponseMessage implements INCLifecycleMessage {
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
+import org.apache.asterix.common.replication.INCLifecycleMessage;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class ReplayPartitionLogsResponseMessage implements INCLifecycleMessage, ICcAddressedMessage {
private static final long serialVersionUID = 1L;
private final Set<Integer> partitions;
@@ -37,8 +38,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+ public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
}
public Set<Integer> getPartitions() {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskRequestMessage.java
index 6a313f0..cfe999c 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskRequestMessage.java
@@ -21,15 +21,16 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
import org.apache.asterix.common.replication.INCLifecycleMessage;
import org.apache.asterix.common.transactions.IRecoveryManager.SystemState;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
import org.apache.hyracks.control.nc.NodeControllerService;
-public class StartupTaskRequestMessage implements INCLifecycleMessage {
+public class StartupTaskRequestMessage implements INCLifecycleMessage, ICcAddressedMessage {
private static final Logger LOGGER = Logger.getLogger(StartupTaskRequestMessage.class.getName());
private static final long serialVersionUID = 1L;
@@ -52,8 +53,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+ public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
}
public SystemState getState() {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskResponseMessage.java
index 92abf5b..aaf3eb8 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/StartupTaskResponseMessage.java
@@ -23,12 +23,14 @@
import java.util.logging.Logger;
import org.apache.asterix.common.api.INCLifecycleTask;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.asterix.common.replication.INCLifecycleMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.service.IControllerService;
-public class StartupTaskResponseMessage implements INCLifecycleMessage {
+public class StartupTaskResponseMessage implements INCLifecycleMessage, INcAddressedMessage {
private static final Logger LOGGER = Logger.getLogger(StartupTaskResponseMessage.class.getName());
private static final long serialVersionUID = 1L;
@@ -41,8 +43,9 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- INCMessageBroker broker = (INCMessageBroker) cs.getContext().getMessageBroker();
+ public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ INCMessageBroker broker = (INCMessageBroker) appCtx.getServiceContext().getMessageBroker();
+ IControllerService cs = appCtx.getServiceContext().getControllerService();
boolean success = true;
HyracksDataException exception = null;
try {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeRequestMessage.java
index fbc0a4d..2137924 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeRequestMessage.java
@@ -21,21 +21,20 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.asterix.common.replication.INCLifecycleMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-public class TakeoverMetadataNodeRequestMessage implements INCLifecycleMessage {
+public class TakeoverMetadataNodeRequestMessage implements INCLifecycleMessage, INcAddressedMessage {
private static final long serialVersionUID = 1L;
private static final Logger LOGGER = Logger.getLogger(TakeoverMetadataNodeRequestMessage.class.getName());
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
- INCMessageBroker broker = (INCMessageBroker) cs.getContext().getMessageBroker();
+ public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
+ INCMessageBroker broker = (INCMessageBroker) appContext.getServiceContext().getMessageBroker();
HyracksDataException hde = null;
try {
appContext.initializeMetadata(false);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeResponseMessage.java
index 428047c..ff1b2d2 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverMetadataNodeResponseMessage.java
@@ -18,12 +18,13 @@
*/
package org.apache.asterix.app.replication.message;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
import org.apache.asterix.common.replication.INCLifecycleMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-public class TakeoverMetadataNodeResponseMessage implements INCLifecycleMessage {
+public class TakeoverMetadataNodeResponseMessage implements INCLifecycleMessage, ICcAddressedMessage {
private static final long serialVersionUID = 1L;
private final String nodeId;
@@ -37,8 +38,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+ public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
}
@Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java
index 09bb051..ea9ac55 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsRequestMessage.java
@@ -22,15 +22,15 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.asterix.common.replication.INCLifecycleMessage;
import org.apache.asterix.common.replication.IRemoteRecoveryManager;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-public class TakeoverPartitionsRequestMessage implements INCLifecycleMessage {
+public class TakeoverPartitionsRequestMessage implements INCLifecycleMessage, INcAddressedMessage {
private static final long serialVersionUID = 1L;
private static final Logger LOGGER = Logger.getLogger(TakeoverPartitionsRequestMessage.class.getName());
@@ -72,9 +72,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
- INCMessageBroker broker = (INCMessageBroker) cs.getContext().getMessageBroker();
+ public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
+ INCMessageBroker broker = (INCMessageBroker) appContext.getServiceContext().getMessageBroker();
//if the NC is shutting down, it should ignore takeover partitions request
if (!appContext.isShuttingdown()) {
HyracksDataException hde = null;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsResponseMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsResponseMessage.java
index e653a64..d9484f9 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsResponseMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/TakeoverPartitionsResponseMessage.java
@@ -18,12 +18,13 @@
*/
package org.apache.asterix.app.replication.message;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
import org.apache.asterix.common.replication.INCLifecycleMessage;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-public class TakeoverPartitionsResponseMessage implements INCLifecycleMessage {
+public class TakeoverPartitionsResponseMessage implements INCLifecycleMessage, ICcAddressedMessage {
private static final long serialVersionUID = 1L;
private final Integer[] partitions;
@@ -49,8 +50,8 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- AppContextInfo.INSTANCE.getFaultToleranceStrategy().process(this);
+ public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ ((CcApplicationContext) appCtx).getFaultToleranceStrategy().process(this);
}
@Override
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultPrinter.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultPrinter.java
index 590b854..7ed3aef 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultPrinter.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultPrinter.java
@@ -24,6 +24,7 @@
import java.io.StringWriter;
import java.nio.ByteBuffer;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.translator.IStatementExecutor.Stats;
import org.apache.asterix.translator.SessionConfig;
@@ -44,8 +45,7 @@
public class ResultPrinter {
- // TODO(tillw): Should this be static?
- private static FrameManager resultDisplayFrameMgr = new FrameManager(ResultReader.FRAME_SIZE);
+ private final FrameManager resultDisplayFrameMgr;
private final SessionConfig conf;
private final Stats stats;
@@ -62,12 +62,13 @@
private ObjectMapper om;
private ObjectWriter ow;
- public ResultPrinter(SessionConfig conf, Stats stats, ARecordType recordType) {
+ public ResultPrinter(ICcApplicationContext appCtx, SessionConfig conf, Stats stats, ARecordType recordType) {
this.conf = conf;
this.stats = stats;
this.recordType = recordType;
this.indentJSON = conf.is(SessionConfig.FORMAT_INDENT_JSON);
this.quoteRecord = conf.is(SessionConfig.FORMAT_QUOTE_RECORD);
+ this.resultDisplayFrameMgr = new FrameManager(appCtx.getCompilerProperties().getFrameSize());
if (indentJSON) {
this.om = new ObjectMapper();
DefaultPrettyPrinter.Indenter i = new DefaultPrettyPrinter.Indenter() {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultReader.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultReader.java
index 1871476..eeb01ba 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultReader.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/result/ResultReader.java
@@ -18,7 +18,6 @@
*/
package org.apache.asterix.app.result;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.IFrameTupleAccessor;
import org.apache.hyracks.api.dataset.DatasetJobRecord.Status;
@@ -37,8 +36,6 @@
// Number of parallel result reader buffers
public static final int NUM_READERS = 1;
- public static final int FRAME_SIZE = AppContextInfo.INSTANCE.getCompilerProperties().getFrameSize();
-
public ResultReader(IHyracksDataset hdc, JobId jobId, ResultSetId resultSetId) throws HyracksDataException {
reader = hdc.createReader(jobId, resultSetId);
frameTupleAccessor = new ResultFrameTupleAccessor();
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/DefaultStatementExecutorFactory.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/DefaultStatementExecutorFactory.java
index 15ed1b4..e2f17ac 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/DefaultStatementExecutorFactory.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/DefaultStatementExecutorFactory.java
@@ -23,6 +23,7 @@
import java.util.concurrent.Executors;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.translator.IStatementExecutor;
@@ -47,8 +48,9 @@
}
@Override
- public IStatementExecutor create(List<Statement> statements, SessionConfig conf,
+ public IStatementExecutor create(ICcApplicationContext appCtx, List<Statement> statements, SessionConfig conf,
ILangCompilationProvider compilationProvider, IStorageComponentProvider storageComponentProvider) {
- return new QueryTranslator(statements, conf, compilationProvider, storageComponentProvider, executorService);
+ return new QueryTranslator(appCtx, statements, conf, compilationProvider, storageComponentProvider,
+ executorService);
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
index cb59f5c..e97a7e1 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java
@@ -39,6 +39,7 @@
import java.util.logging.Logger;
import org.apache.asterix.active.ActiveJobNotificationHandler;
+import org.apache.asterix.active.ActiveLifecycleListener;
import org.apache.asterix.active.ActivityState;
import org.apache.asterix.active.EntityId;
import org.apache.asterix.active.IActiveEntityEventsListener;
@@ -58,6 +59,7 @@
import org.apache.asterix.common.config.ExternalProperties;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
@@ -149,7 +151,6 @@
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.types.TypeSignature;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.transaction.management.service.transaction.DatasetIdFactory;
import org.apache.asterix.translator.AbstractLangTranslator;
import org.apache.asterix.translator.CompiledStatements.CompiledDeleteStatement;
@@ -201,6 +202,7 @@
public static final boolean IS_DEBUG_MODE = false;// true
protected final List<Statement> statements;
+ protected final ICcApplicationContext appCtx;
protected final SessionConfig sessionConfig;
protected Dataverse activeDataverse;
protected final List<FunctionDecl> declaredFunctions;
@@ -209,8 +211,10 @@
protected final IStorageComponentProvider componentProvider;
protected final ExecutorService executorService;
- public QueryTranslator(List<Statement> statements, SessionConfig conf, ILangCompilationProvider compliationProvider,
- IStorageComponentProvider componentProvider, ExecutorService executorService) {
+ public QueryTranslator(ICcApplicationContext appCtx, List<Statement> statements, SessionConfig conf,
+ ILangCompilationProvider compliationProvider, IStorageComponentProvider componentProvider,
+ ExecutorService executorService) {
+ this.appCtx = appCtx;
this.statements = statements;
this.sessionConfig = conf;
this.componentProvider = componentProvider;
@@ -260,9 +264,9 @@
if (sessionConfig.is(SessionConfig.FORMAT_HTML)) {
sessionConfig.out().println(ApiServlet.HTML_STATEMENT_SEPARATOR);
}
- validateOperation(activeDataverse, stmt);
+ validateOperation(appCtx, activeDataverse, stmt);
rewriteStatement(stmt); // Rewrite the statement's AST.
- MetadataProvider metadataProvider = new MetadataProvider(activeDataverse, componentProvider);
+ MetadataProvider metadataProvider = new MetadataProvider(appCtx, activeDataverse, componentProvider);
metadataProvider.setWriterFactory(writerFactory);
metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
metadataProvider.setOutputFile(outputFile);
@@ -533,7 +537,7 @@
throw new AlgebricksException(": type " + itemTypeName + " could not be found.");
}
String ngName = ngNameId != null ? ngNameId.getValue()
- : configureNodegroupForDataset(dataverseName, datasetName, dd.getHints(), mdTxnCtx);
+ : configureNodegroupForDataset(appCtx, dd.getHints(), dataverseName, datasetName, mdTxnCtx);
if (compactionPolicy == null) {
compactionPolicy = GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME;
@@ -677,7 +681,9 @@
protected void validateIfResourceIsActiveInFeed(Dataset dataset) throws CompilationException {
StringBuilder builder = null;
- IActiveEntityEventsListener[] listeners = ActiveJobNotificationHandler.INSTANCE.getEventListeners();
+ ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+ ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+ IActiveEntityEventsListener[] listeners = activeEventHandler.getEventListeners();
for (IActiveEntityEventsListener listener : listeners) {
if (listener.isEntityUsingDataset(dataset)) {
if (builder == null) {
@@ -704,8 +710,8 @@
}
}
- protected static String configureNodegroupForDataset(String dataverseName, String datasetName,
- Map<String, String> hints, MetadataTransactionContext mdTxnCtx) throws CompilationException {
+ protected static String configureNodegroupForDataset(ICcApplicationContext appCtx, Map<String, String> hints,
+ String dataverseName, String datasetName, MetadataTransactionContext mdTxnCtx) throws CompilationException {
int nodegroupCardinality;
String nodegroupName;
String hintValue = hints.get(DatasetNodegroupCardinalityHint.NAME);
@@ -714,16 +720,16 @@
return nodegroupName;
} else {
int numChosen = 0;
- boolean valid = DatasetHints.validate(DatasetNodegroupCardinalityHint.NAME,
+ boolean valid = DatasetHints.validate(appCtx, DatasetNodegroupCardinalityHint.NAME,
hints.get(DatasetNodegroupCardinalityHint.NAME)).first;
if (!valid) {
throw new CompilationException("Incorrect use of hint:" + DatasetNodegroupCardinalityHint.NAME);
} else {
nodegroupCardinality = Integer.parseInt(hints.get(DatasetNodegroupCardinalityHint.NAME));
}
- List<String> nodeNames = AppContextInfo.INSTANCE.getMetadataProperties().getNodeNames();
+ List<String> nodeNames = appCtx.getMetadataProperties().getNodeNames();
List<String> nodeNamesClone = new ArrayList<>(nodeNames);
- String metadataNodeName = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataNodeName();
+ String metadataNodeName = appCtx.getMetadataProperties().getMetadataNodeName();
List<String> selectedNodes = new ArrayList<>();
selectedNodes.add(metadataNodeName);
numChosen++;
@@ -1168,9 +1174,11 @@
}
}
// # disconnect all feeds from any datasets in the dataverse.
- IActiveEntityEventsListener[] activeListeners = ActiveJobNotificationHandler.INSTANCE.getEventListeners();
+ ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+ ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+ IActiveEntityEventsListener[] activeListeners = activeEventHandler.getEventListeners();
Identifier dvId = new Identifier(dataverseName);
- MetadataProvider tempMdProvider = new MetadataProvider(metadataProvider.getDefaultDataverse(),
+ MetadataProvider tempMdProvider = new MetadataProvider(appCtx, metadataProvider.getDefaultDataverse(),
metadataProvider.getStorageComponentProvider());
tempMdProvider.setConfig(metadataProvider.getConfig());
for (IActiveEntityEventsListener listener : activeListeners) {
@@ -1181,7 +1189,7 @@
stopFeedBeforeDelete(new Pair<>(dvId, new Identifier(activeEntityId.getEntityName())),
tempMdProvider);
// prepare job to remove feed log storage
- jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(
+ jobsToExecute.add(FeedOperations.buildRemoveFeedStorageJob(metadataProvider,
MetadataManager.INSTANCE.getFeed(mdTxnCtx, dataverseName, activeEntityId.getEntityName())));
}
}
@@ -1395,7 +1403,9 @@
throw new AlgebricksException(
"There is no dataset with this name " + datasetName + " in dataverse " + dataverseName);
}
- IActiveEntityEventsListener[] listeners = ActiveJobNotificationHandler.INSTANCE.getEventListeners();
+ ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+ ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+ IActiveEntityEventsListener[] listeners = activeEventHandler.getEventListeners();
StringBuilder builder = null;
for (IActiveEntityEventsListener listener : listeners) {
if (listener.isEntityUsingDataset(ds)) {
@@ -1860,7 +1870,7 @@
}
String adaptorName = cfs.getAdaptorName();
feed = new Feed(dataverseName, feedName, adaptorName, cfs.getAdaptorConfiguration());
- FeedMetadataUtil.validateFeed(feed, mdTxnCtx, metadataProvider.getLibraryManager());
+ FeedMetadataUtil.validateFeed(feed, mdTxnCtx, appCtx);
MetadataManager.INSTANCE.addFeed(metadataProvider.getMetadataTxnContext(), feed);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
@@ -1954,13 +1964,14 @@
}
EntityId feedId = new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName);
- FeedEventsListener listener =
- (FeedEventsListener) ActiveJobNotificationHandler.INSTANCE.getActiveEntityListener(feedId);
+ ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+ ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+ FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getActiveEntityListener(feedId);
if (listener != null) {
throw new AlgebricksException("Feed " + feedId
+ " is currently active and connected to the following dataset(s) \n" + listener.toString());
} else {
- JobSpecification spec = FeedOperations.buildRemoveFeedStorageJob(
+ JobSpecification spec = FeedOperations.buildRemoveFeedStorageJob(metadataProvider,
MetadataManager.INSTANCE.getFeed(mdTxnCtx, feedId.getDataverse(), feedId.getEntityName()));
JobUtils.runJob(hcc, spec, true);
MetadataManager.INSTANCE.dropFeed(mdTxnCtx, dataverseName, feedName);
@@ -2024,8 +2035,9 @@
ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
IStorageComponentProvider storageComponentProvider = new StorageComponentProvider();
DefaultStatementExecutorFactory qtFactory = new DefaultStatementExecutorFactory();
- FeedEventsListener listener =
- (FeedEventsListener) ActiveJobNotificationHandler.INSTANCE.getActiveEntityListener(entityId);
+ ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+ ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+ FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getActiveEntityListener(entityId);
if (listener != null) {
throw new AlgebricksException("Feed " + feedName + " is started already.");
}
@@ -2044,8 +2056,8 @@
compilationProvider, storageComponentProvider, qtFactory, hcc);
JobSpecification feedJob = jobInfo.getLeft();
- listener = new FeedEventsListener(entityId, datasets, jobInfo.getRight().getLocations());
- ActiveJobNotificationHandler.INSTANCE.registerListener(listener);
+ listener = new FeedEventsListener(appCtx, entityId, datasets, jobInfo.getRight().getLocations());
+ activeEventHandler.registerListener(listener);
IActiveEventSubscriber eventSubscriber = listener.subscribe(ActivityState.STARTED);
feedJob.setProperty(ActiveJobNotificationHandler.ACTIVE_ENTITY_PROPERTY_NAME, entityId);
JobUtils.runJob(hcc, feedJob,
@@ -2055,7 +2067,7 @@
} catch (Exception e) {
abort(e, e, mdTxnCtx);
if (listener != null) {
- ActiveJobNotificationHandler.INSTANCE.unregisterListener(listener);
+ activeEventHandler.unregisterListener(listener);
}
throw e;
} finally {
@@ -2068,9 +2080,10 @@
String dataverseName = getActiveDataverse(sfst.getDataverseName());
String feedName = sfst.getFeedName().getValue();
EntityId feedId = new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName);
+ ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+ ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
// Obtain runtime info from ActiveListener
- FeedEventsListener listener =
- (FeedEventsListener) ActiveJobNotificationHandler.INSTANCE.getActiveEntityListener(feedId);
+ FeedEventsListener listener = (FeedEventsListener) activeEventHandler.getActiveEntityListener(feedId);
if (listener == null) {
throw new AlgebricksException("Feed " + feedName + " is not started.");
}
@@ -2085,7 +2098,7 @@
// Construct ActiveMessage
for (int i = 0; i < listener.getSources().length; i++) {
String intakeLocation = listener.getSources()[i];
- FeedOperations.SendStopMessageToNode(feedId, intakeLocation, i);
+ FeedOperations.SendStopMessageToNode(appCtx, feedId, intakeLocation, i);
}
eventSubscriber.sync();
} catch (Exception e) {
@@ -2106,7 +2119,9 @@
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// Check whether feed is alive
- if (ActiveJobNotificationHandler.INSTANCE
+ ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+ ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
+ if (activeEventHandler
.getActiveEntityListener(new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName)) != null) {
throw new CompilationException(ErrorCode.FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED, feedName);
}
@@ -2145,8 +2160,10 @@
String feedName = cfs.getFeedName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
+ ActiveLifecycleListener activeListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+ ActiveJobNotificationHandler activeEventHandler = activeListener.getNotificationHandler();
// Check whether feed is alive
- if (ActiveJobNotificationHandler.INSTANCE
+ if (activeEventHandler
.getActiveEntityListener(new EntityId(Feed.EXTENSION_NAME, dataverseName, feedName)) != null) {
throw new CompilationException(ErrorCode.FEED_CHANGE_FEED_CONNECTIVITY_ON_ALIVE_FEED, feedName);
}
@@ -2322,7 +2339,7 @@
case IMMEDIATE:
createAndRunJob(hcc, null, compiler, locker, resultDelivery, id -> {
final ResultReader resultReader = new ResultReader(hdc, id, resultSetId);
- ResultUtil.printResults(resultReader, sessionConfig, stats,
+ ResultUtil.printResults(appCtx, resultReader, sessionConfig, stats,
metadataProvider.findOutputRecordType());
}, clientContextId, ctx);
break;
@@ -2698,7 +2715,7 @@
if (pregelixHome == null) {
// Since there is a default value for PREGELIX_HOME in CompilerProperties,
// pregelixHome can never be null.
- pregelixHome = AppContextInfo.INSTANCE.getCompilerProperties().getPregelixHome();
+ pregelixHome = appCtx.getCompilerProperties().getPregelixHome();
}
// Constructs the pregelix command line.
@@ -2819,7 +2836,7 @@
protected List<String> constructPregelixCommand(RunStatement pregelixStmt, String fromDataverseName,
String fromDatasetName, String toDataverseName, String toDatasetName) {
// Constructs AsterixDB parameters, e.g., URL, source dataset and sink dataset.
- ExternalProperties externalProperties = AppContextInfo.INSTANCE.getExternalProperties();
+ ExternalProperties externalProperties = appCtx.getExternalProperties();
String clientIP = ClusterProperties.INSTANCE.getCluster().getMasterNode().getClientIp();
StringBuilder asterixdbParameterBuilder = new StringBuilder();
asterixdbParameterBuilder.append(
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java
index d4f2129..c9b3565 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixCLI.java
@@ -27,6 +27,7 @@
import org.apache.asterix.api.java.AsterixJavaClient;
import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.compiler.provider.AqlCompilationProvider;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.file.StorageComponentProvider;
@@ -58,7 +59,9 @@
try {
for (String queryFile : options.args) {
Reader in = new FileReader(queryFile);
- AsterixJavaClient ajc = new AsterixJavaClient(integrationUtil.getHyracksClientConnection(), in,
+ AsterixJavaClient ajc =
+ new AsterixJavaClient((ICcApplicationContext) integrationUtil.cc.getApplicationContext(),
+ integrationUtil.getHyracksClientConnection(), in,
compilationProvider, new DefaultStatementExecutorFactory(), new StorageComponentProvider());
try {
ajc.compile(true, false, false, false, false, true, false);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
index f44aeb9..ba44833 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
@@ -64,7 +64,7 @@
boolean onlyPhysical, boolean createBinaryRuntime) throws Exception {
ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
FileReader reader = new FileReader(filename);
- AsterixJavaClient q = new AsterixJavaClient(hcc, reader, compilationProvider,
+ AsterixJavaClient q = new AsterixJavaClient(null, hcc, reader, compilationProvider,
new DefaultStatementExecutorFactory(), new StorageComponentProvider());
q.compile(optimize, true, true, true, onlyPhysical, createBinaryRuntime, createBinaryRuntime);
return q;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
index 578c206..9c66f57 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
@@ -60,7 +60,6 @@
import org.apache.asterix.common.config.MetadataProperties;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.library.ILibraryManager;
-import org.apache.asterix.common.messaging.api.ICCMessageBroker;
import org.apache.asterix.common.replication.IFaultToleranceStrategy;
import org.apache.asterix.common.replication.IReplicationStrategy;
import org.apache.asterix.common.utils.Servlets;
@@ -72,7 +71,7 @@
import org.apache.asterix.metadata.bootstrap.AsterixStateProxy;
import org.apache.asterix.metadata.cluster.ClusterManagerProvider;
import org.apache.asterix.runtime.job.resource.JobCapacityController;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.asterix.translator.IStatementExecutorFactory;
import org.apache.hyracks.api.application.ICCServiceContext;
@@ -96,8 +95,9 @@
protected ICCServiceContext ccServiceCtx;
protected CCExtensionManager ccExtensionManager;
protected IStorageComponentProvider componentProvider;
- private IJobCapacityController jobCapacityController;
protected WebManager webManager;
+ protected CcApplicationContext appCtx;
+ private IJobCapacityController jobCapacityController;
@Override
public void start(IServiceContext serviceCtx, String[] args) throws Exception {
@@ -105,8 +105,8 @@
throw new IllegalArgumentException("Unrecognized argument(s): " + Arrays.toString(args));
}
final ClusterControllerService controllerService = (ClusterControllerService) serviceCtx.getControllerService();
- ICCMessageBroker messageBroker = new CCMessageBroker(controllerService);
this.ccServiceCtx = (ICCServiceContext) serviceCtx;
+ ccServiceCtx.setMessageBroker(new CCMessageBroker(controllerService));
configureLoggingLevel(ccServiceCtx.getAppConfig().getLoggingLevel(ExternalProperties.Option.LOG_LEVEL));
@@ -120,34 +120,33 @@
ResourceIdManager resourceIdManager = new ResourceIdManager();
IReplicationStrategy repStrategy = ClusterProperties.INSTANCE.getReplicationStrategy();
IFaultToleranceStrategy ftStrategy = FaultToleranceStrategyFactory
- .create(ClusterProperties.INSTANCE.getCluster(), repStrategy, messageBroker);
+ .create(ClusterProperties.INSTANCE.getCluster(), repStrategy, ccServiceCtx);
ExternalLibraryUtils.setUpExternaLibraries(libraryManager, false);
componentProvider = new StorageComponentProvider();
GlobalRecoveryManager.instantiate((HyracksConnection) getHcc(), componentProvider);
- AppContextInfo.initialize(ccServiceCtx, getHcc(), libraryManager, resourceIdManager,
- () -> MetadataManager.INSTANCE, GlobalRecoveryManager.instance(), ftStrategy);
+ appCtx = new CcApplicationContext(ccServiceCtx, getHcc(), libraryManager, resourceIdManager,
+ () -> MetadataManager.INSTANCE, GlobalRecoveryManager.instance(), ftStrategy,
+ new ActiveLifecycleListener());
+ ClusterStateManager.INSTANCE.setCcAppCtx(appCtx);
ccExtensionManager = new CCExtensionManager(getExtensions());
- AppContextInfo.INSTANCE.setExtensionManager(ccExtensionManager);
+ appCtx.setExtensionManager(ccExtensionManager);
final CCConfig ccConfig = controllerService.getCCConfig();
if (System.getProperty("java.rmi.server.hostname") == null) {
System.setProperty("java.rmi.server.hostname", ccConfig.getClusterListenAddress());
}
- MetadataProperties metadataProperties = AppContextInfo.INSTANCE.getMetadataProperties();
+ MetadataProperties metadataProperties = appCtx.getMetadataProperties();
setAsterixStateProxy(AsterixStateProxy.registerRemoteObject(metadataProperties.getMetadataCallbackPort()));
ccServiceCtx.setDistributedState(proxy);
-
MetadataManager.initialize(proxy, metadataProperties);
-
- AppContextInfo.INSTANCE.getCCServiceContext().addJobLifecycleListener(ActiveLifecycleListener.INSTANCE);
+ ccServiceCtx.addJobLifecycleListener(appCtx.getActiveLifecycleListener());
// create event loop groups
webManager = new WebManager();
configureServers();
webManager.start();
ClusterManagerProvider.getClusterManager().registerSubscriber(GlobalRecoveryManager.instance());
- ccServiceCtx.addClusterLifecycleListener(ClusterLifecycleListener.INSTANCE);
- ccServiceCtx.setMessageBroker(messageBroker);
+ ccServiceCtx.addClusterLifecycleListener(new ClusterLifecycleListener(appCtx));
jobCapacityController = new JobCapacityController(controllerService.getResourceManager());
}
@@ -160,18 +159,18 @@
}
protected List<AsterixExtension> getExtensions() {
- return AppContextInfo.INSTANCE.getExtensionProperties().getExtensions();
+ return appCtx.getExtensionProperties().getExtensions();
}
protected void configureServers() throws Exception {
- webManager.add(setupWebServer(AppContextInfo.INSTANCE.getExternalProperties()));
- webManager.add(setupJSONAPIServer(AppContextInfo.INSTANCE.getExternalProperties()));
- webManager.add(setupQueryWebServer(AppContextInfo.INSTANCE.getExternalProperties()));
+ webManager.add(setupWebServer(appCtx.getExternalProperties()));
+ webManager.add(setupJSONAPIServer(appCtx.getExternalProperties()));
+ webManager.add(setupQueryWebServer(appCtx.getExternalProperties()));
}
@Override
public void stop() throws Exception {
- ActiveLifecycleListener.INSTANCE.stop();
+ ((ActiveLifecycleListener) appCtx.getActiveLifecycleListener()).stop();
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Stopping Asterix cluster controller");
}
@@ -184,7 +183,7 @@
externalProperties.getWebInterfacePort());
IHyracksClientConnection hcc = getHcc();
webServer.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
- webServer.addServlet(new ApiServlet(webServer.ctx(), new String[] { "/*" },
+ webServer.addServlet(new ApiServlet(webServer.ctx(), new String[] { "/*" }, appCtx,
ccExtensionManager.getAqlCompilationProvider(), ccExtensionManager.getSqlppCompilationProvider(),
getStatementExecutorFactory(), componentProvider));
return webServer;
@@ -195,7 +194,7 @@
new HttpServer(webManager.getBosses(), webManager.getWorkers(), externalProperties.getAPIServerPort());
IHyracksClientConnection hcc = getHcc();
jsonAPIServer.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
- jsonAPIServer.setAttribute(ASTERIX_APP_CONTEXT_INFO_ATTR, AppContextInfo.INSTANCE);
+ jsonAPIServer.setAttribute(ASTERIX_APP_CONTEXT_INFO_ATTR, appCtx);
jsonAPIServer.setAttribute(ServletConstants.EXECUTOR_SERVICE_ATTR,
ccServiceCtx.getControllerService().getExecutor());
@@ -235,47 +234,47 @@
externalProperties.getQueryWebInterfacePort());
IHyracksClientConnection hcc = getHcc();
queryWebServer.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
- queryWebServer.addServlet(new QueryWebInterfaceServlet(queryWebServer.ctx(), new String[] { "/*" }));
+ queryWebServer.addServlet(new QueryWebInterfaceServlet(appCtx, queryWebServer.ctx(), new String[] { "/*" }));
return queryWebServer;
}
protected IServlet createServlet(ConcurrentMap<String, Object> ctx, String key, String... paths) {
switch (key) {
case Servlets.AQL:
- return new FullApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
+ return new FullApiServlet(ctx, paths, appCtx, ccExtensionManager.getAqlCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.AQL_QUERY:
- return new QueryApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
+ return new QueryApiServlet(ctx, paths, appCtx, ccExtensionManager.getAqlCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.AQL_UPDATE:
- return new UpdateApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
+ return new UpdateApiServlet(ctx, paths, appCtx, ccExtensionManager.getAqlCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.AQL_DDL:
- return new DdlApiServlet(ctx, paths, ccExtensionManager.getAqlCompilationProvider(),
+ return new DdlApiServlet(ctx, paths, appCtx, ccExtensionManager.getAqlCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.SQLPP:
- return new FullApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
+ return new FullApiServlet(ctx, paths, appCtx, ccExtensionManager.getSqlppCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.SQLPP_QUERY:
- return new QueryApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
+ return new QueryApiServlet(ctx, paths, appCtx, ccExtensionManager.getSqlppCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.SQLPP_UPDATE:
- return new UpdateApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
+ return new UpdateApiServlet(ctx, paths, appCtx, ccExtensionManager.getSqlppCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.SQLPP_DDL:
- return new DdlApiServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
+ return new DdlApiServlet(ctx, paths, appCtx, ccExtensionManager.getSqlppCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.RUNNING_REQUESTS:
return new QueryCancellationServlet(ctx, paths);
case Servlets.QUERY_STATUS:
- return new QueryStatusApiServlet(ctx, paths);
+ return new QueryStatusApiServlet(ctx, paths, appCtx);
case Servlets.QUERY_RESULT:
- return new QueryResultApiServlet(ctx, paths);
+ return new QueryResultApiServlet(ctx, paths, appCtx);
case Servlets.QUERY_SERVICE:
- return new QueryServiceServlet(ctx, paths, ccExtensionManager.getSqlppCompilationProvider(),
+ return new QueryServiceServlet(ctx, paths, appCtx, ccExtensionManager.getSqlppCompilationProvider(),
getStatementExecutorFactory(), componentProvider);
case Servlets.CONNECTOR:
- return new ConnectorApiServlet(ctx, paths);
+ return new ConnectorApiServlet(ctx, paths, appCtx);
case Servlets.SHUTDOWN:
return new ShutdownApiServlet(ctx, paths);
case Servlets.VERSION:
@@ -287,7 +286,7 @@
case Servlets.CLUSTER_STATE_CC_DETAIL:
return new ClusterControllerDetailsApiServlet(ctx, paths);
case Servlets.DIAGNOSTICS:
- return new DiagnosticsApiServlet(ctx, paths);
+ return new DiagnosticsApiServlet(ctx, paths, appCtx);
default:
throw new IllegalStateException(String.valueOf(key));
}
@@ -299,7 +298,7 @@
@Override
public void startupCompleted() throws Exception {
- ccServiceCtx.getControllerService().getExecutor().submit((Callable)() -> {
+ ccServiceCtx.getControllerService().getExecutor().submit((Callable) () -> {
ClusterStateManager.INSTANCE.waitForState(ClusterState.ACTIVE);
ClusterManagerProvider.getClusterManager().notifyStartupCompleted();
return null;
@@ -322,8 +321,8 @@
}
@Override
- public AppContextInfo getApplicationContext() {
- return AppContextInfo.INSTANCE;
+ public CcApplicationContext getApplicationContext() {
+ return appCtx;
}
protected IHyracksClientConnection getHcc() throws Exception {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
index 8883504..66f76c5 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
@@ -41,6 +41,7 @@
import org.apache.asterix.metadata.cluster.ClusterManagerProvider;
import org.apache.asterix.metadata.cluster.RemoveNodeWork;
import org.apache.asterix.metadata.cluster.RemoveNodeWorkResponse;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.hyracks.api.application.IClusterLifecycleListener;
import org.apache.hyracks.api.config.IOption;
@@ -49,16 +50,14 @@
public class ClusterLifecycleListener implements IClusterLifecycleListener {
private static final Logger LOGGER = Logger.getLogger(ClusterLifecycleListener.class.getName());
+ private final CcApplicationContext appCtx;
+ private final LinkedBlockingQueue<Set<IClusterManagementWork>> workRequestQueue = new LinkedBlockingQueue<>();
+ private final ClusterWorkExecutor eventHandler;
+ private final List<IClusterManagementWorkResponse> pendingWorkResponses = new ArrayList<>();
- private static final LinkedBlockingQueue<Set<IClusterManagementWork>> workRequestQueue = new LinkedBlockingQueue<Set<IClusterManagementWork>>();
-
- private static ClusterWorkExecutor eventHandler = new ClusterWorkExecutor(workRequestQueue);
-
- private static List<IClusterManagementWorkResponse> pendingWorkResponses = new ArrayList<IClusterManagementWorkResponse>();
-
- public static ClusterLifecycleListener INSTANCE = new ClusterLifecycleListener();
-
- private ClusterLifecycleListener() {
+ public ClusterLifecycleListener(CcApplicationContext appCtx) {
+ this.appCtx = appCtx;
+ eventHandler = new ClusterWorkExecutor(appCtx, workRequestQueue);
Thread t = new Thread(eventHandler);
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Starting cluster event handler");
@@ -78,12 +77,12 @@
MetadataManager.INSTANCE.rebindMetadataNode();
}
- Set<String> nodeAddition = new HashSet<String>();
+ Set<String> nodeAddition = new HashSet<>();
nodeAddition.add(nodeId);
updateProgress(ClusterEventType.NODE_JOIN, nodeAddition);
Set<IClusterEventsSubscriber> subscribers =
ClusterManagerProvider.getClusterManager().getRegisteredClusterEventSubscribers();
- Set<IClusterManagementWork> work = new HashSet<IClusterManagementWork>();
+ Set<IClusterManagementWork> work = new HashSet<>();
for (IClusterEventsSubscriber sub : subscribers) {
Set<IClusterManagementWork> workRequest = sub.notifyNodeJoin(nodeId);
work.addAll(workRequest);
@@ -110,7 +109,7 @@
updateProgress(ClusterEventType.NODE_FAILURE, deadNodeIds);
Set<IClusterEventsSubscriber> subscribers =
ClusterManagerProvider.getClusterManager().getRegisteredClusterEventSubscribers();
- Set<IClusterManagementWork> work = new HashSet<IClusterManagementWork>();
+ Set<IClusterManagementWork> work = new HashSet<>();
for (IClusterEventsSubscriber sub : subscribers) {
Set<IClusterManagementWork> workRequest = sub.notifyNodeFailure(deadNodeIds);
work.addAll(workRequest);
@@ -121,7 +120,7 @@
}
private void updateProgress(ClusterEventType eventType, Collection<String> nodeIds) {
- List<IClusterManagementWorkResponse> completedResponses = new ArrayList<IClusterManagementWorkResponse>();
+ List<IClusterManagementWorkResponse> completedResponses = new ArrayList<>();
boolean isComplete = false;
for (IClusterManagementWorkResponse resp : pendingWorkResponses) {
switch (eventType) {
@@ -149,9 +148,9 @@
private void executeWorkSet(Set<IClusterManagementWork> workSet) {
int nodesToAdd = 0;
- Set<String> nodesToRemove = new HashSet<String>();
- Set<AddNodeWork> nodeAdditionRequests = new HashSet<AddNodeWork>();
- Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<IClusterManagementWork>();
+ Set<String> nodesToRemove = new HashSet<>();
+ Set<AddNodeWork> nodeAdditionRequests = new HashSet<>();
+ Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<>();
for (IClusterManagementWork w : workSet) {
switch (w.getClusterManagementWorkType()) {
case ADD_NODE:
@@ -163,20 +162,20 @@
case REMOVE_NODE:
nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved());
nodeRemovalRequests.add(w);
- RemoveNodeWorkResponse response = new RemoveNodeWorkResponse((RemoveNodeWork) w,
- Status.IN_PROGRESS);
+ RemoveNodeWorkResponse response =
+ new RemoveNodeWorkResponse((RemoveNodeWork) w, Status.IN_PROGRESS);
pendingWorkResponses.add(response);
break;
}
}
- List<String> addedNodes = new ArrayList<String>();
+ List<String> addedNodes = new ArrayList<>();
String asterixInstanceName = ClusterProperties.INSTANCE.getCluster().getInstanceName();
for (int i = 0; i < nodesToAdd; i++) {
Node node = ClusterStateManager.INSTANCE.getAvailableSubstitutionNode();
if (node != null) {
try {
- ClusterManagerProvider.getClusterManager().addNode(node);
+ ClusterManagerProvider.getClusterManager().addNode(appCtx, node);
addedNodes.add(asterixInstanceName + "_" + node.getId());
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Added NC at:" + node.getId());
@@ -197,7 +196,7 @@
for (AddNodeWork w : nodeAdditionRequests) {
int n = w.getNumberOfNodesRequested();
- List<String> nodesToBeAddedForWork = new ArrayList<String>();
+ List<String> nodesToBeAddedForWork = new ArrayList<>();
for (int i = 0; i < n && i < addedNodes.size(); i++) {
nodesToBeAddedForWork.add(addedNodes.get(i));
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
index 71fff3f..46968b4 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterWorkExecutor.java
@@ -25,6 +25,7 @@
import java.util.logging.Logger;
import org.apache.asterix.common.api.IClusterManagementWork;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.event.schema.cluster.Node;
import org.apache.asterix.metadata.cluster.AddNodeWork;
@@ -36,9 +37,11 @@
private static final Logger LOGGER = Logger.getLogger(ClusterWorkExecutor.class.getName());
+ private final ICcApplicationContext appCtx;
private final LinkedBlockingQueue<Set<IClusterManagementWork>> inbox;
- public ClusterWorkExecutor(LinkedBlockingQueue<Set<IClusterManagementWork>> inbox) {
+ public ClusterWorkExecutor(ICcApplicationContext appCtx, LinkedBlockingQueue<Set<IClusterManagementWork>> inbox) {
+ this.appCtx = appCtx;
this.inbox = inbox;
}
@@ -48,9 +51,9 @@
try {
Set<IClusterManagementWork> workSet = inbox.take();
int nodesToAdd = 0;
- Set<String> nodesToRemove = new HashSet<String>();
- Set<IClusterManagementWork> nodeAdditionRequests = new HashSet<IClusterManagementWork>();
- Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<IClusterManagementWork>();
+ Set<String> nodesToRemove = new HashSet<>();
+ Set<IClusterManagementWork> nodeAdditionRequests = new HashSet<>();
+ Set<IClusterManagementWork> nodeRemovalRequests = new HashSet<>();
for (IClusterManagementWork w : workSet) {
switch (w.getClusterManagementWorkType()) {
case ADD_NODE:
@@ -66,12 +69,12 @@
}
}
- Set<Node> addedNodes = new HashSet<Node>();
+ Set<Node> addedNodes = new HashSet<>();
for (int i = 0; i < nodesToAdd; i++) {
Node node = ClusterStateManager.INSTANCE.getAvailableSubstitutionNode();
if (node != null) {
try {
- ClusterManagerProvider.getClusterManager().addNode(node);
+ ClusterManagerProvider.getClusterManager().addNode(appCtx, node);
addedNodes.add(node);
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Added NC at:" + node.getId());
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
index 7bd1e62..722bb78 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
@@ -33,6 +33,7 @@
import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
import org.apache.asterix.common.config.DatasetConfig.TransactionState;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.external.indexing.ExternalFile;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -71,7 +72,6 @@
@Override
public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
- startGlobalRecovery();
return Collections.emptySet();
}
@@ -92,7 +92,7 @@
}
@Override
- public void startGlobalRecovery() {
+ public void startGlobalRecovery(ICcApplicationContext appCtx) {
// perform global recovery if state changed to active
final ClusterState newState = ClusterStateManager.INSTANCE.getState();
boolean needToRecover = !newState.equals(state) && (newState == ClusterState.ACTIVE);
@@ -110,7 +110,8 @@
List<Dataverse> dataverses = MetadataManager.INSTANCE.getDataverses(mdTxnCtx);
for (Dataverse dataverse : dataverses) {
if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
- MetadataProvider metadataProvider = new MetadataProvider(dataverse, componentProvider);
+ MetadataProvider metadataProvider =
+ new MetadataProvider(appCtx, dataverse, componentProvider);
try {
List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx,
dataverse.getDataverseName());
@@ -120,11 +121,11 @@
// Get indexes
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx,
dataset.getDataverseName(), dataset.getDatasetName());
+ // Get the state of the dataset
+ ExternalDatasetDetails dsd =
+ (ExternalDatasetDetails) dataset.getDatasetDetails();
+ TransactionState datasetState = dsd.getState();
if (!indexes.isEmpty()) {
- // Get the state of the dataset
- ExternalDatasetDetails dsd =
- (ExternalDatasetDetails) dataset.getDatasetDetails();
- TransactionState datasetState = dsd.getState();
if (datasetState == TransactionState.BEGIN) {
List<ExternalFile> files = MetadataManager.INSTANCE
.getDatasetExternalFiles(mdTxnCtx, dataset);
@@ -135,59 +136,55 @@
MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
}
}
- // 2. clean artifacts in NCs
- metadataProvider.setMetadataTxnContext(mdTxnCtx);
- JobSpecification jobSpec = ExternalIndexingOperations
- .buildAbortOp(dataset, indexes, metadataProvider);
- executeHyracksJob(jobSpec);
- // 3. correct the dataset state
- ((ExternalDatasetDetails) dataset.getDatasetDetails())
- .setState(TransactionState.COMMIT);
- MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
- MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- } else if (datasetState == TransactionState.READY_TO_COMMIT) {
- List<ExternalFile> files = MetadataManager.INSTANCE
- .getDatasetExternalFiles(mdTxnCtx, dataset);
- // if ready to commit, roll forward
- // 1. commit indexes in NCs
- metadataProvider.setMetadataTxnContext(mdTxnCtx);
- JobSpecification jobSpec = ExternalIndexingOperations
- .buildRecoverOp(dataset, indexes, metadataProvider);
- executeHyracksJob(jobSpec);
- // 2. add pending files in metadata
- for (ExternalFile file : files) {
- if (file.getPendingOp() == ExternalFilePendingOp.ADD_OP) {
- MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
- file.setPendingOp(ExternalFilePendingOp.NO_OP);
- MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
- } else if (file
- .getPendingOp() == ExternalFilePendingOp.DROP_OP) {
- // find original file
- for (ExternalFile originalFile : files) {
- if (originalFile.getFileName()
- .equals(file.getFileName())) {
- MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
- file);
- MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
- originalFile);
- break;
- }
+ }
+ // 2. clean artifacts in NCs
+ metadataProvider.setMetadataTxnContext(mdTxnCtx);
+ JobSpecification jobSpec = ExternalIndexingOperations
+ .buildAbortOp(dataset, indexes, metadataProvider);
+ executeHyracksJob(jobSpec);
+ // 3. correct the dataset state
+ ((ExternalDatasetDetails) dataset.getDatasetDetails())
+ .setState(TransactionState.COMMIT);
+ MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ } else if (datasetState == TransactionState.READY_TO_COMMIT) {
+ List<ExternalFile> files = MetadataManager.INSTANCE
+ .getDatasetExternalFiles(mdTxnCtx, dataset);
+ // if ready to commit, roll forward
+ // 1. commit indexes in NCs
+ metadataProvider.setMetadataTxnContext(mdTxnCtx);
+ JobSpecification jobSpec = ExternalIndexingOperations
+ .buildRecoverOp(dataset, indexes, metadataProvider);
+ executeHyracksJob(jobSpec);
+ // 2. add pending files in metadata
+ for (ExternalFile file : files) {
+ if (file.getPendingOp() == ExternalFilePendingOp.ADD_OP) {
+ MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
+ file.setPendingOp(ExternalFilePendingOp.NO_OP);
+ MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
+ } else if (file.getPendingOp() == ExternalFilePendingOp.DROP_OP) {
+ // find original file
+ for (ExternalFile originalFile : files) {
+ if (originalFile.getFileName().equals(file.getFileName())) {
+ MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+ file);
+ MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+ originalFile);
+ break;
}
- } else if (file
- .getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
- // find original file
- for (ExternalFile originalFile : files) {
- if (originalFile.getFileName()
- .equals(file.getFileName())) {
- MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
- file);
- MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
- originalFile);
- originalFile.setSize(file.getSize());
- MetadataManager.INSTANCE.addExternalFile(mdTxnCtx,
- originalFile);
- }
+ }
+ } else if (file.getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
+ // find original file
+ for (ExternalFile originalFile : files) {
+ if (originalFile.getFileName().equals(file.getFileName())) {
+ MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+ file);
+ MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
+ originalFile);
+ originalFile.setSize(file.getSize());
+ MetadataManager.INSTANCE.addExternalFile(mdTxnCtx,
+ originalFile);
}
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
index aaf3d7a..9c24acf 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplication.java
@@ -28,11 +28,10 @@
import org.apache.asterix.app.nc.NCAppRuntimeContext;
import org.apache.asterix.app.replication.message.StartupTaskRequestMessage;
import org.apache.asterix.common.api.AsterixThreadFactory;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.config.AsterixExtension;
import org.apache.asterix.common.config.ClusterProperties;
import org.apache.asterix.common.config.ExternalProperties;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.config.MessagingProperties;
import org.apache.asterix.common.config.MetadataProperties;
import org.apache.asterix.common.config.NodeProperties;
@@ -61,7 +60,7 @@
private static final Logger LOGGER = Logger.getLogger(NCApplication.class.getName());
private INCServiceContext ncServiceCtx;
- private IAppRuntimeContext runtimeContext;
+ private INcApplicationContext runtimeContext;
private String nodeId;
private boolean stopInitiated = false;
private SystemState systemState;
@@ -176,8 +175,7 @@
@Override
public NodeCapacity getCapacity() {
- IPropertiesProvider propertiesProvider = runtimeContext;
- StorageProperties storageProperties = propertiesProvider.getStorageProperties();
+ StorageProperties storageProperties = runtimeContext.getStorageProperties();
// Deducts the reserved buffer cache size and memory component size from the maxium heap size,
// and deducts one core for processing heartbeats.
long memorySize = Runtime.getRuntime().maxMemory() - storageProperties.getBufferCacheSize()
@@ -261,7 +259,7 @@
}
@Override
- public IAppRuntimeContext getApplicationContext() {
+ public INcApplicationContext getApplicationContext() {
return runtimeContext;
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java
index da93fb8..23de847 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/CCMessageBroker.java
@@ -21,8 +21,10 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.messaging.api.ICCMessageBroker;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.hyracks.api.messages.IMessage;
import org.apache.hyracks.api.util.JavaSerializationUtils;
import org.apache.hyracks.control.cc.ClusterControllerService;
@@ -40,15 +42,16 @@
@Override
public void receivedMessage(IMessage message, String nodeId) throws Exception {
- IApplicationMessage absMessage = (IApplicationMessage) message;
+ ICcAddressedMessage msg = (ICcAddressedMessage) message;
if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Received message: " + absMessage);
+ LOGGER.info("Received message: " + msg);
}
- absMessage.handle(ccs);
+ ICcApplicationContext appCtx = (ICcApplicationContext) ccs.getApplicationContext();
+ msg.handle(appCtx);
}
@Override
- public void sendApplicationMessageToNC(IApplicationMessage msg, String nodeId) throws Exception {
+ public void sendApplicationMessageToNC(INcAddressedMessage msg, String nodeId) throws Exception {
INodeManager nodeManager = ccs.getNodeManager();
NodeControllerState state = nodeManager.getNodeControllerState(nodeId);
state.getNodeController().sendApplicationMessageToNC(JavaSerializationUtils.serialize(msg), null, nodeId);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java
index 33f16da..a2a3460 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/MessagingChannelInterfaceFactory.java
@@ -26,7 +26,7 @@
import org.apache.asterix.common.config.MessagingProperties;
import org.apache.asterix.common.memory.ConcurrentFramePool;
import org.apache.asterix.common.memory.FrameAction;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.hyracks.api.comm.IBufferAcceptor;
import org.apache.hyracks.api.comm.IBufferFactory;
import org.apache.hyracks.api.comm.IChannelControlBlock;
@@ -108,8 +108,8 @@
@Override
public void accept(ByteBuffer buffer) {
try {
- IApplicationMessage receivedMsg = (IApplicationMessage) JavaSerializationUtils
- .deserialize(buffer.array());
+ INcAddressedMessage receivedMsg =
+ (INcAddressedMessage) JavaSerializationUtils.deserialize(buffer.array());
// Queue the received message and free the network IO thread
messageBroker.queueReceivedMessage(receivedMsg);
} catch (ClassNotFoundException | IOException e) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
index f615138..630aabe 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
@@ -24,11 +24,12 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.config.MessagingProperties;
import org.apache.asterix.common.memory.ConcurrentFramePool;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.hyracks.api.comm.IChannelControlBlock;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.messages.IMessage;
@@ -39,14 +40,14 @@
private static final Logger LOGGER = Logger.getLogger(NCMessageBroker.class.getName());
private final NodeControllerService ncs;
- private final IAppRuntimeContext appContext;
- private final LinkedBlockingQueue<IApplicationMessage> receivedMsgsQ;
+ private final INcApplicationContext appContext;
+ private final LinkedBlockingQueue<INcAddressedMessage> receivedMsgsQ;
private final ConcurrentFramePool messagingFramePool;
private final int maxMsgSize;
public NCMessageBroker(NodeControllerService ncs, MessagingProperties messagingProperties) {
this.ncs = ncs;
- appContext = (IAppRuntimeContext) ncs.getApplicationContext();
+ appContext = (INcApplicationContext) ncs.getApplicationContext();
maxMsgSize = messagingProperties.getFrameSize();
int messagingMemoryBudget = messagingProperties.getFrameSize() * messagingProperties.getFrameCount();
messagingFramePool = new ConcurrentFramePool(ncs.getId(), messagingMemoryBudget,
@@ -57,36 +58,36 @@
}
@Override
- public void sendMessageToCC(IApplicationMessage message) throws Exception {
+ public void sendMessageToCC(ICcAddressedMessage message) throws Exception {
ncs.sendApplicationMessageToCC(JavaSerializationUtils.serialize(message), null);
}
@Override
- public void sendMessageToNC(String nodeId, IApplicationMessage message)
+ public void sendMessageToNC(String nodeId, INcAddressedMessage message)
throws Exception {
IChannelControlBlock messagingChannel = ncs.getMessagingNetworkManager().getMessagingChannel(nodeId);
sendMessageToChannel(messagingChannel, message);
}
@Override
- public void queueReceivedMessage(IApplicationMessage msg) {
+ public void queueReceivedMessage(INcAddressedMessage msg) {
receivedMsgsQ.offer(msg);
}
@Override
public void receivedMessage(IMessage message, String nodeId) throws Exception {
- IApplicationMessage absMessage = (IApplicationMessage) message;
+ INcAddressedMessage absMessage = (INcAddressedMessage) message;
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Received message: " + absMessage);
}
- absMessage.handle(ncs);
+ absMessage.handle(appContext);
}
public ConcurrentFramePool getMessagingFramePool() {
return messagingFramePool;
}
- private void sendMessageToChannel(IChannelControlBlock ccb, IApplicationMessage msg) throws IOException {
+ private void sendMessageToChannel(IChannelControlBlock ccb, INcAddressedMessage msg) throws IOException {
byte[] serializedMsg = JavaSerializationUtils.serialize(msg);
if (serializedMsg.length > maxMsgSize) {
throw new HyracksDataException("Message exceded maximum size");
@@ -115,7 +116,7 @@
@Override
public void run() {
while (true) {
- IApplicationMessage msg = null;
+ INcAddressedMessage msg = null;
try {
msg = receivedMsgsQ.take();
//TODO add nodeId to IApplicationMessage and pass it
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java
index 48fd782..039933a 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java
@@ -34,7 +34,7 @@
}
public static JobSpecification dropDataverseJobSpec(Dataverse dataverse, MetadataProvider metadata) {
- JobSpecification jobSpec = RuntimeUtils.createJobSpecification();
+ JobSpecification jobSpec = RuntimeUtils.createJobSpecification(metadata.getApplicationContext());
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
metadata.splitAndConstraints(dataverse.getDataverseName());
FileRemoveOperatorDescriptor frod =
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
index 8b3b020..bffaeef 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
@@ -32,8 +32,8 @@
import org.apache.asterix.active.EntityId;
import org.apache.asterix.active.message.ActiveManagerMessage;
import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
-import org.apache.asterix.common.config.CompilerProperties;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.dataflow.LSMTreeInsertDeleteOperatorDescriptor;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.exceptions.AsterixException;
@@ -50,7 +50,6 @@
import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
import org.apache.asterix.external.operators.FeedIntakeOperatorNodePushable;
import org.apache.asterix.external.operators.FeedMetaOperatorDescriptor;
-import org.apache.asterix.external.util.FeedConstants;
import org.apache.asterix.external.util.FeedUtils;
import org.apache.asterix.external.util.FeedUtils.FeedRuntimeType;
import org.apache.asterix.lang.aql.statement.SubscribeFeedStatement;
@@ -66,7 +65,6 @@
import org.apache.asterix.metadata.feeds.LocationConstraint;
import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
import org.apache.asterix.runtime.job.listener.MultiTransactionJobletEventListenerFactory;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.asterix.runtime.utils.RuntimeUtils;
import org.apache.asterix.translator.CompiledStatements;
@@ -82,7 +80,6 @@
import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
import org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
-import org.apache.hyracks.algebricks.runtime.operators.std.StreamSelectRuntimeFactory;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.constraints.Constraint;
import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
@@ -111,15 +108,13 @@
*/
public class FeedOperations {
- private static final CompilerProperties compilerProperties = AppContextInfo.INSTANCE.getCompilerProperties();
-
private FeedOperations() {
}
private static Pair<JobSpecification, IAdapterFactory> buildFeedIntakeJobSpec(Feed feed,
MetadataProvider metadataProvider, FeedPolicyAccessor policyAccessor) throws Exception {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
- spec.setFrameSize(compilerProperties.getFrameSize());
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
+ spec.setFrameSize(metadataProvider.getApplicationContext().getCompilerProperties().getFrameSize());
IAdapterFactory adapterFactory;
IOperatorDescriptor feedIngestor;
AlgebricksPartitionConstraint ingesterPc;
@@ -136,8 +131,9 @@
return Pair.of(spec, adapterFactory);
}
- public static JobSpecification buildRemoveFeedStorageJob(Feed feed) throws AsterixException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ public static JobSpecification buildRemoveFeedStorageJob(MetadataProvider metadataProvider, Feed feed)
+ throws AsterixException {
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
AlgebricksAbsolutePartitionConstraint allCluster = ClusterStateManager.INSTANCE.getClusterLocations();
Set<String> nodes = new TreeSet<>();
for (String node : allCluster.getLocations()) {
@@ -168,8 +164,8 @@
List<Statement> statements = new ArrayList<>();
statements.add(dataverseDecl);
statements.add(subscribeStmt);
- IStatementExecutor translator =
- qtFactory.create(statements, sessionConfig, compilationProvider, storageComponentProvider);
+ IStatementExecutor translator = qtFactory.create(metadataProvider.getApplicationContext(), statements,
+ sessionConfig, compilationProvider, storageComponentProvider);
// configure the metadata provider
metadataProvider.getConfig().put(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, "" + Boolean.TRUE);
metadataProvider.getConfig().put(FeedActivityDetails.FEED_POLICY_NAME, "" + subscribeStmt.getPolicy());
@@ -218,8 +214,8 @@
JobSpecification subJob = jobsList.get(iter1);
operatorIdMapping.clear();
Map<OperatorDescriptorId, IOperatorDescriptor> operatorsMap = subJob.getOperatorMap();
- FeedConnectionId feedConnectionId = new FeedConnectionId(ingestionOp.getEntityId(),
- feedConnections.get(iter1).getDatasetName());
+ FeedConnectionId feedConnectionId =
+ new FeedConnectionId(ingestionOp.getEntityId(), feedConnections.get(iter1).getDatasetName());
FeedPolicyEntity feedPolicyEntity =
FeedMetadataUtil.validateIfPolicyExists(curFeedConnection.getDataverseName(),
@@ -232,9 +228,8 @@
if (opDesc instanceof LSMTreeInsertDeleteOperatorDescriptor
&& ((LSMTreeInsertDeleteOperatorDescriptor) opDesc).isPrimary()) {
String operandId = ((LSMTreeInsertDeleteOperatorDescriptor) opDesc).getIndexName();
- metaOp = new FeedMetaOperatorDescriptor(jobSpec,
- feedConnectionId, opDesc, feedPolicyEntity.getProperties(), FeedRuntimeType.STORE,
- operandId);
+ metaOp = new FeedMetaOperatorDescriptor(jobSpec, feedConnectionId, opDesc,
+ feedPolicyEntity.getProperties(), FeedRuntimeType.STORE, operandId);
opId = metaOp.getOperatorId();
opDesc.setOperatorId(opId);
} else {
@@ -243,13 +238,12 @@
IPushRuntimeFactory[] runtimeFactories = algOp.getPipeline().getRuntimeFactories();
// Tweak AssignOp to work with messages
if (runtimeFactories[0] instanceof AssignRuntimeFactory && runtimeFactories.length > 1) {
- IConnectorDescriptor connectorDesc = subJob.getOperatorInputMap()
- .get(opDesc.getOperatorId()).get(0);
+ IConnectorDescriptor connectorDesc =
+ subJob.getOperatorInputMap().get(opDesc.getOperatorId()).get(0);
// anything on the network interface needs to be message compatible
if (connectorDesc instanceof MToNPartitioningConnectorDescriptor) {
- metaOp = new FeedMetaOperatorDescriptor(jobSpec,
- feedConnectionId, opDesc, feedPolicyEntity.getProperties(),
- FeedRuntimeType.COMPUTE, null);
+ metaOp = new FeedMetaOperatorDescriptor(jobSpec, feedConnectionId, opDesc,
+ feedPolicyEntity.getProperties(), FeedRuntimeType.COMPUTE, null);
opId = metaOp.getOperatorId();
opDesc.setOperatorId(opId);
}
@@ -279,9 +273,8 @@
}
// make connections between operators
- for (Entry<ConnectorDescriptorId,
- Pair<Pair<IOperatorDescriptor, Integer>,Pair<IOperatorDescriptor, Integer>>> entry :
- subJob.getConnectorOperatorMap().entrySet()) {
+ for (Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>,
+ Pair<IOperatorDescriptor, Integer>>> entry : subJob.getConnectorOperatorMap().entrySet()) {
ConnectorDescriptorId newId = connectorIdMapping.get(entry.getKey());
IConnectorDescriptor connDesc = jobSpec.getConnectorMap().get(newId);
Pair<IOperatorDescriptor, Integer> leftOp = entry.getValue().getLeft();
@@ -387,16 +380,16 @@
ingestionLocations), intakeInfo.getRight().getPartitionConstraint());
}
- public static void SendStopMessageToNode(EntityId feedId, String intakeNodeLocation, Integer partition)
- throws Exception {
+ public static void SendStopMessageToNode(ICcApplicationContext appCtx, EntityId feedId, String intakeNodeLocation,
+ Integer partition) throws Exception {
ActiveManagerMessage stopFeedMessage = new ActiveManagerMessage(ActiveManagerMessage.STOP_ACTIVITY, "SRC",
new ActiveRuntimeId(feedId, FeedIntakeOperatorNodePushable.class.getSimpleName(), partition));
- SendActiveMessage(stopFeedMessage, intakeNodeLocation);
+ SendActiveMessage(appCtx, stopFeedMessage, intakeNodeLocation);
}
- private static void SendActiveMessage(ActiveManagerMessage activeManagerMessage, String nodeId) throws Exception {
- ICCMessageBroker messageBroker =
- (ICCMessageBroker) AppContextInfo.INSTANCE.getCCServiceContext().getMessageBroker();
+ private static void SendActiveMessage(ICcApplicationContext appCtx, ActiveManagerMessage activeManagerMessage,
+ String nodeId) throws Exception {
+ ICCMessageBroker messageBroker = (ICCMessageBroker) appCtx.getServiceContext().getMessageBroker();
messageBroker.sendApplicationMessageToNC(activeManagerMessage, nodeId);
}
}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
index d766827..5445986 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
@@ -25,7 +25,6 @@
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
import org.apache.asterix.runtime.operators.std.FlushDatasetOperatorDescriptor;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.transaction.management.service.transaction.JobIdFactory;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
@@ -46,7 +45,7 @@
public static void flushDataset(IHyracksClientConnection hcc, MetadataProvider metadataProvider,
String dataverseName, String datasetName, String indexName) throws Exception {
- CompilerProperties compilerProperties = AppContextInfo.INSTANCE.getCompilerProperties();
+ CompilerProperties compilerProperties = metadataProvider.getApplicationContext().getCompilerProperties();
int frameSize = compilerProperties.getFrameSize();
JobSpecification spec = new JobSpecification(frameSize);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
index fe08b8c..8b1bbe0 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
@@ -29,6 +29,7 @@
import java.util.concurrent.ConcurrentHashMap;
import org.apache.asterix.api.http.server.ConnectorApiServlet;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.file.StorageComponentProvider;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.metadata.MetadataTransactionContext;
@@ -38,6 +39,7 @@
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.utils.JSONDeserializerForTypes;
+import org.apache.asterix.test.runtime.ExecutionTestUtil;
import org.apache.asterix.test.runtime.SqlppExecutionTest;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.client.NodeControllerInfo;
@@ -63,9 +65,9 @@
public void testGet() throws Exception {
// Starts test asterixdb cluster.
SqlppExecutionTest.setUp();
-
// Configures a test connector api servlet.
- ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" });
+ ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" },
+ (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext());
Map<String, NodeControllerInfo> nodeMap = new HashMap<>();
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
PrintWriter outputWriter = new PrintWriter(outputStream);
@@ -118,7 +120,8 @@
@Test
public void testFormResponseObject() throws Exception {
- ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" });
+ ConnectorApiServlet let = new ConnectorApiServlet(new ConcurrentHashMap<>(), new String[] { "/" },
+ (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext());
ObjectMapper om = new ObjectMapper();
ObjectNode actualResponse = om.createObjectNode();
FileSplit[] splits = new FileSplit[2];
@@ -168,7 +171,9 @@
private ARecordType getMetadataRecordType(String dataverseName, String datasetName) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
// Retrieves file splits of the dataset.
- MetadataProvider metadataProvider = new MetadataProvider(null, new StorageComponentProvider());
+ MetadataProvider metadataProvider = new MetadataProvider(
+ (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), null,
+ new StorageComponentProvider());
try {
metadataProvider.setMetadataTxnContext(mdTxnCtx);
Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
index 52ac855..340b1ce 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
@@ -32,7 +32,7 @@
import org.apache.asterix.api.http.server.VersionApiServlet;
import org.apache.asterix.common.config.BuildProperties;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
import org.apache.asterix.test.runtime.SqlppExecutionTest;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.http.api.IServletRequest;
@@ -60,7 +60,7 @@
PrintWriter outputWriter = new PrintWriter(outputStream);
// Creates mocks.
- AppContextInfo mockCtx = mock(AppContextInfo.class);
+ CcApplicationContext mockCtx = mock(CcApplicationContext.class);
IServletRequest mockRequest = mock(IServletRequest.class);
IHyracksClientConnection mockHcc = mock(IHyracksClientConnection.class);
IServletResponse mockResponse = mock(IServletResponse.class);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
index 4b2af80..3c2bee1 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
@@ -51,6 +51,7 @@
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.runtime.formats.NonTaggedDataFormat;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
import org.apache.asterix.runtime.utils.RuntimeComponentsProvider;
import org.apache.asterix.test.runtime.ExecutionTestUtil;
import org.apache.asterix.transaction.management.opcallbacks.AbstractIndexModificationOperationCallback.Operation;
@@ -299,7 +300,9 @@
Dataverse dataverse = new Dataverse(dataset.getDataverseName(), NonTaggedDataFormat.class.getName(),
MetadataUtil.PENDING_NO_OP);
Index index = primaryIndexInfo.getIndex();
- MetadataProvider mdProvider = new MetadataProvider(dataverse, storageComponentProvider);
+ CcApplicationContext appCtx =
+ (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
+ MetadataProvider mdProvider = new MetadataProvider(appCtx, dataverse, storageComponentProvider);
try {
return dataset.getIndexDataflowHelperFactory(mdProvider, index, primaryIndexInfo.recordType,
primaryIndexInfo.metaType, primaryIndexInfo.mergePolicyFactory,
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/aql/translator/QueryTranslatorTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/aql/translator/QueryTranslatorTest.java
index 5cd8a63..e2885b3 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/aql/translator/QueryTranslatorTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/aql/translator/QueryTranslatorTest.java
@@ -37,7 +37,7 @@
import org.apache.asterix.file.StorageComponentProvider;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.statement.RunStatement;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.runtime.utils.CcApplicationContext;
import org.apache.asterix.translator.IStatementExecutor;
import org.apache.asterix.translator.SessionConfig;
import org.junit.Assert;
@@ -55,8 +55,7 @@
RunStatement mockRunStatement = mock(RunStatement.class);
// Mocks AppContextInfo.
- AppContextInfo mockAsterixAppContextInfo = mock(AppContextInfo.class);
- setFinalStaticField(AppContextInfo.class.getDeclaredField("INSTANCE"), mockAsterixAppContextInfo);
+ CcApplicationContext mockAsterixAppContextInfo = mock(CcApplicationContext.class);
ExternalProperties mockAsterixExternalProperties = mock(ExternalProperties.class);
when(mockAsterixAppContextInfo.getExternalProperties()).thenReturn(mockAsterixExternalProperties);
when(mockAsterixExternalProperties.getAPIServerPort()).thenReturn(19002);
@@ -70,8 +69,8 @@
when(mockCluster.getMasterNode()).thenReturn(mockMasterNode);
when(mockMasterNode.getClientIp()).thenReturn("127.0.0.1");
- IStatementExecutor aqlTranslator = new DefaultStatementExecutorFactory().create(statements, mockSessionConfig,
- new AqlCompilationProvider(), new StorageComponentProvider());
+ IStatementExecutor aqlTranslator = new DefaultStatementExecutorFactory().create(mockAsterixAppContextInfo,
+ statements, mockSessionConfig, new AqlCompilationProvider(), new StorageComponentProvider());
List<String> parameters = new ArrayList<>();
parameters.add("examples/pregelix-example-jar-with-dependencies.jar");
parameters.add("org.apache.pregelix.example.PageRankVertex");
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
index 4702b1d..7818d13 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
@@ -28,6 +28,7 @@
import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
import org.apache.asterix.api.java.AsterixJavaClient;
import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.compiler.provider.AqlCompilationProvider;
import org.apache.asterix.file.StorageComponentProvider;
@@ -61,7 +62,9 @@
integrationUtil.init(true);
Reader loadReader = new BufferedReader(
new InputStreamReader(new FileInputStream(LOAD_FOR_ENLIST_FILE), "UTF-8"));
- AsterixJavaClient asterixLoad = new AsterixJavaClient(integrationUtil.getHyracksClientConnection(), loadReader,
+ AsterixJavaClient asterixLoad =
+ new AsterixJavaClient((ICcApplicationContext) integrationUtil.cc.getApplicationContext(),
+ integrationUtil.getHyracksClientConnection(), loadReader,
ERR, new AqlCompilationProvider(), new DefaultStatementExecutorFactory(), new StorageComponentProvider());
try {
asterixLoad.compile(true, false, false, false, false, true, false);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
index 15581c3..409bbdc 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
@@ -33,6 +33,7 @@
import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.compiler.provider.AqlCompilationProvider;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
@@ -65,8 +66,8 @@
private static final String EXTENSION_RESULT = "plan";
private static final String FILENAME_IGNORE = "ignore.txt";
private static final String FILENAME_ONLY = "only.txt";
- private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR
- + "optimizerts" + SEPARATOR;
+ private static final String PATH_BASE =
+ "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "optimizerts" + SEPARATOR;
private static final String PATH_QUERIES = PATH_BASE + "queries" + SEPARATOR;
private static final String PATH_EXPECTED = PATH_BASE + "results" + SEPARATOR;
protected static final String PATH_ACTUAL = "target" + File.separator + "opttest" + SEPARATOR;
@@ -152,8 +153,8 @@
@Test
public void test() throws Exception {
try {
- String queryFileShort = queryFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0),
- '/');
+ String queryFileShort =
+ queryFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0), '/');
if (!only.isEmpty()) {
boolean toRun = TestHelper.isInPrefixList(only, queryFileShort);
if (!toRun) {
@@ -175,14 +176,15 @@
actualFile.getParentFile().mkdirs();
PrintWriter plan = new PrintWriter(actualFile);
- ILangCompilationProvider provider = queryFile.getName().endsWith("aql") ? aqlCompilationProvider
- : sqlppCompilationProvider;
+ ILangCompilationProvider provider =
+ queryFile.getName().endsWith("aql") ? aqlCompilationProvider : sqlppCompilationProvider;
if (extensionLangCompilationProvider != null) {
provider = extensionLangCompilationProvider;
}
IHyracksClientConnection hcc = integrationUtil.getHyracksClientConnection();
- AsterixJavaClient asterix = new AsterixJavaClient(hcc, query, plan, provider, statementExecutorFactory,
- storageComponentProvider);
+ AsterixJavaClient asterix =
+ new AsterixJavaClient((ICcApplicationContext) integrationUtil.cc.getApplicationContext(), hcc,
+ query, plan, provider, statementExecutorFactory, storageComponentProvider);
try {
asterix.compile(true, false, false, true, true, false, false);
} catch (AsterixException e) {
@@ -193,10 +195,10 @@
plan.close();
query.close();
- BufferedReader readerExpected = new BufferedReader(
- new InputStreamReader(new FileInputStream(expectedFile), "UTF-8"));
- BufferedReader readerActual = new BufferedReader(
- new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
+ BufferedReader readerExpected =
+ new BufferedReader(new InputStreamReader(new FileInputStream(expectedFile), "UTF-8"));
+ BufferedReader readerActual =
+ new BufferedReader(new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
String lineExpected, lineActual;
int num = 1;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
index 8714319..5c2d263 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
@@ -26,12 +26,12 @@
import java.util.logging.Logger;
import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.library.ILibraryManager;
import org.apache.asterix.external.util.ExternalDataConstants;
import org.apache.asterix.external.util.IdentitiyResolverFactory;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.testframework.xml.TestGroup;
import org.apache.asterix.testframework.xml.TestSuite;
import org.apache.hyracks.control.nc.NodeControllerService;
@@ -89,10 +89,10 @@
List<ILibraryManager> libraryManagers = new ArrayList<>();
// Adds the library manager for CC.
- libraryManagers.add(AppContextInfo.INSTANCE.getLibraryManager());
+ libraryManagers.add(((ICcApplicationContext) integrationUtil.cc.getApplicationContext()).getLibraryManager());
// Adds library managers for NCs, one-per-NC.
for (NodeControllerService nc : integrationUtil.ncs) {
- IAppRuntimeContext runtimeCtx = (IAppRuntimeContext) nc.getApplicationContext();
+ INcApplicationContext runtimeCtx = (INcApplicationContext) nc.getApplicationContext();
libraryManagers.add(runtimeCtx.getLibraryManager());
}
return libraryManagers;
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IApplicationContext.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IApplicationContext.java
new file mode 100644
index 0000000..0aea84d
--- /dev/null
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IApplicationContext.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.common.api;
+
+import org.apache.asterix.common.config.ActiveProperties;
+import org.apache.asterix.common.config.BuildProperties;
+import org.apache.asterix.common.config.CompilerProperties;
+import org.apache.asterix.common.config.ExternalProperties;
+import org.apache.asterix.common.config.MessagingProperties;
+import org.apache.asterix.common.config.MetadataProperties;
+import org.apache.asterix.common.config.NodeProperties;
+import org.apache.asterix.common.config.ReplicationProperties;
+import org.apache.asterix.common.config.StorageProperties;
+import org.apache.asterix.common.config.TransactionProperties;
+import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.hyracks.api.application.IServiceContext;
+
+public interface IApplicationContext {
+
+ StorageProperties getStorageProperties();
+
+ TransactionProperties getTransactionProperties();
+
+ CompilerProperties getCompilerProperties();
+
+ MetadataProperties getMetadataProperties();
+
+ ExternalProperties getExternalProperties();
+
+ ActiveProperties getActiveProperties();
+
+ BuildProperties getBuildProperties();
+
+ ReplicationProperties getReplicationProperties();
+
+ MessagingProperties getMessagingProperties();
+
+ NodeProperties getNodeProperties();
+
+ /**
+ * @return the library manager which implements {@link org.apache.asterix.common.library.ILibraryManager}
+ */
+ public ILibraryManager getLibraryManager();
+
+ IServiceContext getServiceContext();
+
+}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IAppRuntimeContext.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/INcApplicationContext.java
similarity index 94%
rename from asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IAppRuntimeContext.java
rename to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/INcApplicationContext.java
index da4da6b..5e69746 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IAppRuntimeContext.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/INcApplicationContext.java
@@ -22,16 +22,15 @@
import java.rmi.RemoteException;
import java.util.concurrent.Executor;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.library.ILibraryManager;
import org.apache.asterix.common.replication.IRemoteRecoveryManager;
import org.apache.asterix.common.replication.IReplicaResourcesManager;
import org.apache.asterix.common.replication.IReplicationChannel;
import org.apache.asterix.common.replication.IReplicationManager;
import org.apache.asterix.common.transactions.ITransactionSubsystem;
+import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -42,7 +41,7 @@
import org.apache.hyracks.storage.common.file.ILocalResourceRepository;
import org.apache.hyracks.storage.common.file.IResourceIdFactory;
-public interface IAppRuntimeContext extends IPropertiesProvider {
+public interface INcApplicationContext extends IApplicationContext {
IIOManager getIOManager();
@@ -88,8 +87,6 @@
IReplicationChannel getReplicationChannel();
- ILibraryManager getLibraryManager();
-
/**
* Exports the metadata node to the metadata RMI port.
*
@@ -116,4 +113,7 @@
* @return instance of {@link org.apache.asterix.common.context.IStorageComponentProvider}
*/
IStorageComponentProvider getStorageComponentProvider();
+
+ @Override
+ INCServiceContext getServiceContext();
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IGlobalRecoveryManager.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IGlobalRecoveryManager.java
index b54bb39..6211af4 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IGlobalRecoveryManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/cluster/IGlobalRecoveryManager.java
@@ -19,11 +19,12 @@
package org.apache.asterix.common.cluster;
import org.apache.asterix.common.api.IClusterEventsSubscriber;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
public interface IGlobalRecoveryManager extends IClusterEventsSubscriber {
/**
* Starts the global recovery process if the cluster state changed to ACTIVE.
*/
- public void startGlobalRecovery();
+ public void startGlobalRecovery(ICcApplicationContext appCtx);
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertiesProvider.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertiesProvider.java
deleted file mode 100644
index c1264a9..0000000
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/IPropertiesProvider.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.common.config;
-
-public interface IPropertiesProvider {
- StorageProperties getStorageProperties();
-
- TransactionProperties getTransactionProperties();
-
- CompilerProperties getCompilerProperties();
-
- MetadataProperties getMetadataProperties();
-
- ExternalProperties getExternalProperties();
-
- ActiveProperties getActiveProperties();
-
- BuildProperties getBuildProperties();
-
- ReplicationProperties getReplicationProperties();
-
- MessagingProperties getMessagingProperties();
-
- NodeProperties getNodeProperties();
-}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java
index 7f3f6aa..f122096 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/AsterixVirtualBufferCacheProvider.java
@@ -20,7 +20,7 @@
import java.util.List;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
@@ -56,7 +56,7 @@
deviceId = i;
}
}
- return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+ return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
.getDatasetLifecycleManager().getVirtualBufferCaches(datasetID, deviceId);
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicyFactory.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicyFactory.java
index d9381a2..cec9f57 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicyFactory.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/CorrelatedPrefixMergePolicyFactory.java
@@ -24,7 +24,7 @@
import java.util.Map;
import java.util.Set;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.storage.am.common.api.IResourceLifecycleManager;
@@ -43,7 +43,7 @@
@Override
public ILSMMergePolicy createMergePolicy(Map<String, String> properties, IHyracksTaskContext ctx) {
- IDatasetLifecycleManager dslcManager = ((IAppRuntimeContext) ctx.getJobletContext()
+ IDatasetLifecycleManager dslcManager = ((INcApplicationContext) ctx.getJobletContext()
.getServiceContext().getApplicationContext()).getDatasetLifecycleManager();
ILSMMergePolicy policy = new CorrelatedPrefixMergePolicy(dslcManager, datasetID);
policy.configure(properties);
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/TransactionSubsystemProvider.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/TransactionSubsystemProvider.java
index 5dab970..0a40058 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/TransactionSubsystemProvider.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/TransactionSubsystemProvider.java
@@ -19,7 +19,7 @@
package org.apache.asterix.common.context;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.transactions.ITransactionSubsystem;
import org.apache.hyracks.api.context.IHyracksTaskContext;
@@ -37,8 +37,8 @@
@Override
public ITransactionSubsystem getTransactionSubsystem(IHyracksTaskContext ctx) {
- IAppRuntimeContext appCtx =
- (IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+ INcApplicationContext appCtx =
+ (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
return appCtx.getTransactionSubsystem();
}
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/IApplicationContextInfo.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/ICcApplicationContext.java
similarity index 76%
rename from asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/IApplicationContextInfo.java
rename to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/ICcApplicationContext.java
index 3c5328d..a9e6448 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/IApplicationContextInfo.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/ICcApplicationContext.java
@@ -18,9 +18,12 @@
*/
package org.apache.asterix.common.dataflow;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.cluster.IGlobalRecoveryManager;
-import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.transactions.IResourceIdManager;
import org.apache.hyracks.api.application.ICCServiceContext;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.job.IJobLifecycleListener;
import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
import org.apache.hyracks.storage.common.IStorageManager;
@@ -33,7 +36,7 @@
* and {@link org.apache.asterix.common.library.ILibraryManager}
* at the cluster controller side.
*/
-public interface IApplicationContextInfo {
+public interface ICcApplicationContext extends IApplicationContext {
/**
* Returns an instance of the implementation for IIndexLifecycleManagerProvider.
@@ -50,7 +53,8 @@
/**
* @return an instance which implements {@link org.apache.hyracks.api.application.ICCServiceContext}
*/
- public ICCServiceContext getCCServiceContext();
+ @Override
+ public ICCServiceContext getServiceContext();
/**
* @return the global recovery manager which implements
@@ -59,7 +63,14 @@
public IGlobalRecoveryManager getGlobalRecoveryManager();
/**
- * @return the library manager which implements {@link org.apache.asterix.common.library.ILibraryManager}
+ * @return the active lifecycle listener at the cluster controller
*/
- public ILibraryManager getLibraryManager();
+ public IJobLifecycleListener getActiveLifecycleListener();
+
+ /**
+ * @return a new instance of {@link IHyracksClientConnection}
+ */
+ public IHyracksClientConnection getHcc();
+
+ public IResourceIdManager getResourceIdManager();
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/LSMInsertDeleteOperatorNodePushable.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/LSMInsertDeleteOperatorNodePushable.java
index 2eca55d..4b70dd7 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/LSMInsertDeleteOperatorNodePushable.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/dataflow/LSMInsertDeleteOperatorNodePushable.java
@@ -20,7 +20,7 @@
import java.nio.ByteBuffer;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.transactions.ILogMarkerCallback;
import org.apache.asterix.common.transactions.PrimaryIndexLogMarkerCallback;
import org.apache.hyracks.api.comm.VSizeFrame;
@@ -93,8 +93,8 @@
tupleFilter = tupleFilterFactory.createTupleFilter(ctx);
frameTuple = new FrameTupleReference();
}
- IAppRuntimeContext runtimeCtx =
- (IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+ INcApplicationContext runtimeCtx =
+ (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
LSMIndexUtil.checkAndSetFirstLSN(lsmIndex, runtimeCtx.getTransactionSubsystem().getLogManager());
} catch (Throwable th) {
throw new HyracksDataException(th);
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICCMessageBroker.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICCMessageBroker.java
index ced2b6d..b2fde52 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICCMessageBroker.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICCMessageBroker.java
@@ -29,5 +29,5 @@
* @param nodeId
* @throws Exception
*/
- public void sendApplicationMessageToNC(IApplicationMessage msg, String nodeId) throws Exception;
+ public void sendApplicationMessageToNC(INcAddressedMessage msg, String nodeId) throws Exception;
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICcAddressedMessage.java
similarity index 82%
copy from asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java
copy to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICcAddressedMessage.java
index 6e8c4cf..37cba9c 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/ICcAddressedMessage.java
@@ -18,15 +18,16 @@
*/
package org.apache.asterix.common.messaging.api;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.messages.IMessage;
-import org.apache.hyracks.api.service.IControllerService;
@FunctionalInterface
-public interface IApplicationMessage extends IMessage {
+public interface ICcAddressedMessage extends IMessage {
/**
* handle the message upon delivery
*/
- void handle(IControllerService cs) throws HyracksDataException, InterruptedException;
+ void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException;
+
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java
index 707f864..e1101b3 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java
@@ -29,7 +29,7 @@
* @param callback
* @throws Exception
*/
- public void sendMessageToCC(IApplicationMessage message) throws Exception;
+ public void sendMessageToCC(ICcAddressedMessage message) throws Exception;
/**
* Sends application message from this NC to another NC.
@@ -38,7 +38,7 @@
* @param callback
* @throws Exception
*/
- public void sendMessageToNC(String nodeId, IApplicationMessage message)
+ public void sendMessageToNC(String nodeId, INcAddressedMessage message)
throws Exception;
/**
@@ -46,5 +46,5 @@
*
* @param msg
*/
- public void queueReceivedMessage(IApplicationMessage msg);
+ public void queueReceivedMessage(INcAddressedMessage msg);
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INcAddressedMessage.java
similarity index 82%
rename from asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java
rename to asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INcAddressedMessage.java
index 6e8c4cf..0eef06c 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/IApplicationMessage.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INcAddressedMessage.java
@@ -18,15 +18,16 @@
*/
package org.apache.asterix.common.messaging.api;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.messages.IMessage;
-import org.apache.hyracks.api.service.IControllerService;
@FunctionalInterface
-public interface IApplicationMessage extends IMessage {
+public interface INcAddressedMessage extends IMessage {
/**
* handle the message upon delivery
*/
- void handle(IControllerService cs) throws HyracksDataException, InterruptedException;
+ void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException;
+
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/IFaultToleranceStrategy.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/IFaultToleranceStrategy.java
index 46d5d98..5c286cc 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/IFaultToleranceStrategy.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/IFaultToleranceStrategy.java
@@ -19,7 +19,7 @@
package org.apache.asterix.common.replication;
import org.apache.asterix.common.cluster.IClusterStateManager;
-import org.apache.asterix.common.messaging.api.ICCMessageBroker;
+import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public interface IFaultToleranceStrategy {
@@ -62,6 +62,6 @@
* @param messageBroker
* @return
*/
- IFaultToleranceStrategy from(IReplicationStrategy replicationStrategy, ICCMessageBroker messageBroker);
+ IFaultToleranceStrategy from(ICCServiceContext serviceCtx, IReplicationStrategy replicationStrategy);
}
\ No newline at end of file
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/INCLifecycleMessage.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/INCLifecycleMessage.java
index c19b0aa..87b0856 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/INCLifecycleMessage.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/replication/INCLifecycleMessage.java
@@ -18,9 +18,9 @@
*/
package org.apache.asterix.common.replication;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.hyracks.api.messages.IMessage;
-public interface INCLifecycleMessage extends IApplicationMessage {
+public interface INCLifecycleMessage extends IMessage {
public enum MessageType {
REPLAY_LOGS_REQUEST,
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/IAppRuntimeContextProvider.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/IAppRuntimeContextProvider.java
index 4ff1f47..c27d2de 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/IAppRuntimeContextProvider.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/transactions/IAppRuntimeContextProvider.java
@@ -19,7 +19,7 @@
package org.apache.asterix.common.transactions;
import org.apache.asterix.common.api.ThreadExecutor;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.hyracks.api.io.IIOManager;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
@@ -50,5 +50,5 @@
public IIOManager getIOManager();
- public IAppRuntimeContext getAppContext();
+ public INcApplicationContext getAppContext();
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
index 46da770..2eb81d4 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
@@ -23,7 +23,8 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.IApplicationContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.library.ILibraryManager;
import org.apache.asterix.external.api.IAdapterFactory;
import org.apache.asterix.external.api.IDataFlowController;
@@ -47,6 +48,8 @@
import org.apache.asterix.om.types.ARecordType;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.INCServiceContext;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileSplit;
@@ -88,10 +91,11 @@
@Override
public synchronized IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition)
throws HyracksDataException {
- IAppRuntimeContext appCtx =
- (IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+ INCServiceContext serviceCtx = ctx.getJobletContext().getServiceContext();
+ INcApplicationContext appCtx =
+ (INcApplicationContext) serviceCtx.getApplicationContext();
try {
- restoreExternalObjects(appCtx.getLibraryManager());
+ restoreExternalObjects(serviceCtx, appCtx.getLibraryManager());
} catch (Exception e) {
LOGGER.log(Level.INFO, "Failure restoring external objects", e);
throw HyracksDataException.create(e);
@@ -111,7 +115,7 @@
}
}
- private void restoreExternalObjects(ILibraryManager libraryManager)
+ private void restoreExternalObjects(IServiceContext serviceContext, ILibraryManager libraryManager)
throws HyracksDataException, AlgebricksException {
if (dataSourceFactory == null) {
dataSourceFactory = DatasourceFactoryProvider.getExternalDataSourceFactory(libraryManager, configuration);
@@ -119,7 +123,7 @@
if (dataSourceFactory.isIndexible() && (files != null)) {
((IIndexibleExternalDataSource) dataSourceFactory).setSnapshot(files, indexingOp);
}
- dataSourceFactory.configure(configuration);
+ dataSourceFactory.configure(serviceContext, configuration);
}
if (dataParserFactory == null) {
// create and configure parser factory
@@ -131,17 +135,19 @@
}
@Override
- public void configure(ILibraryManager libraryManager, Map<String, String> configuration)
+ public void configure(IServiceContext serviceContext, Map<String, String> configuration)
throws HyracksDataException, AlgebricksException {
this.configuration = configuration;
+ IApplicationContext appCtx = (IApplicationContext) serviceContext.getApplicationContext();
ExternalDataUtils.validateDataSourceParameters(configuration);
- dataSourceFactory = DatasourceFactoryProvider.getExternalDataSourceFactory(libraryManager, configuration);
+ dataSourceFactory =
+ DatasourceFactoryProvider.getExternalDataSourceFactory(appCtx.getLibraryManager(), configuration);
if (dataSourceFactory.isIndexible() && (files != null)) {
((IIndexibleExternalDataSource) dataSourceFactory).setSnapshot(files, indexingOp);
}
- dataSourceFactory.configure(configuration);
+ dataSourceFactory.configure(serviceContext, configuration);
ExternalDataUtils.validateDataParserParameters(configuration);
- dataParserFactory = ParserFactoryProvider.getDataParserFactory(libraryManager, configuration);
+ dataParserFactory = ParserFactoryProvider.getDataParserFactory(appCtx.getLibraryManager(), configuration);
dataParserFactory.setRecordType(recordType);
dataParserFactory.setMetaType(metaType);
dataParserFactory.configure(configuration);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java
index 6b69d9c..a31b46d 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/LookupAdapterFactory.java
@@ -21,7 +21,7 @@
import java.io.Serializable;
import java.util.Map;
-import org.apache.asterix.common.library.ILibraryManager;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.external.api.ILookupReaderFactory;
import org.apache.asterix.external.api.ILookupRecordReader;
import org.apache.asterix.external.api.IRecordDataParser;
@@ -34,6 +34,7 @@
import org.apache.asterix.external.provider.ParserFactoryProvider;
import org.apache.asterix.om.types.ARecordType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.IMissingWriterFactory;
@@ -66,8 +67,8 @@
ExternalFileIndexAccessor snapshotAccessor, IFrameWriter writer) throws HyracksDataException {
try {
IRecordDataParser<T> dataParser = dataParserFactory.createRecordParser(ctx);
- ILookupRecordReader<? extends T> reader = readerFactory.createRecordReader(ctx, partition,
- snapshotAccessor);
+ ILookupRecordReader<? extends T> reader =
+ readerFactory.createRecordReader(ctx, partition, snapshotAccessor);
reader.configure(configuration);
RecordIdReader ridReader = RecordIdReaderFactory.create(configuration, ridFields);
return new LookupAdapter<>(dataParser, reader, inRecDesc, ridReader, retainInput, retainMissing,
@@ -77,14 +78,15 @@
}
}
- public void configure(ILibraryManager libraryManager, Map<String, String> configuration)
+ public void configure(IServiceContext serviceContext, Map<String, String> configuration)
throws HyracksDataException, AlgebricksException {
this.configuration = configuration;
- readerFactory = LookupReaderFactoryProvider.getLookupReaderFactory(configuration);
- dataParserFactory = (IRecordDataParserFactory<T>) ParserFactoryProvider.getDataParserFactory(libraryManager,
- configuration);
+ IApplicationContext appCtx = (IApplicationContext) serviceContext.getApplicationContext();
+ readerFactory = LookupReaderFactoryProvider.getLookupReaderFactory(serviceContext, configuration);
+ dataParserFactory = (IRecordDataParserFactory<T>) ParserFactoryProvider
+ .getDataParserFactory(appCtx.getLibraryManager(), configuration);
dataParserFactory.setRecordType(recordType);
- readerFactory.configure(configuration);
+ readerFactory.configure(serviceContext, configuration);
dataParserFactory.configure(configuration);
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
index df1b43e..40bc7d8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IAdapterFactory.java
@@ -21,10 +21,10 @@
import java.io.Serializable;
import java.util.Map;
-import org.apache.asterix.common.library.ILibraryManager;
import org.apache.asterix.om.types.ARecordType;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -70,12 +70,12 @@
/**
* Configure the adapter
*
- * @param libraryManager
+ * @param serviceContext
* @param configuration
* @throws AlgebricksException
* @throws HyracksDataException
*/
- void configure(ILibraryManager libraryManager, Map<String, String> configuration)
+ void configure(IServiceContext serviceContext, Map<String, String> configuration)
throws HyracksDataException, AlgebricksException;
/**
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java
index 5538369..edda448 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IExternalDataSourceFactory.java
@@ -24,11 +24,12 @@
import java.util.Map;
import java.util.Set;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public interface IExternalDataSourceFactory extends Serializable {
@@ -62,7 +63,8 @@
* @param configuration
* @throws AsterixException
*/
- public void configure(Map<String, String> configuration) throws AlgebricksException, HyracksDataException;
+ public void configure(IServiceContext ctx, Map<String, String> configuration)
+ throws AlgebricksException, HyracksDataException;
/**
* Specify whether the external data source can be indexed
@@ -82,11 +84,11 @@
* @return
* @throws AlgebricksException
*/
- public static AlgebricksAbsolutePartitionConstraint getPartitionConstraints(
+ public static AlgebricksAbsolutePartitionConstraint getPartitionConstraints(IApplicationContext appCtx,
AlgebricksAbsolutePartitionConstraint constraints, int count) throws AlgebricksException {
if (constraints == null) {
ArrayList<String> locs = new ArrayList<>();
- Set<String> stores = AppContextInfo.INSTANCE.getMetadataProperties().getStores().keySet();
+ Set<String> stores = appCtx.getMetadataProperties().getStores().keySet();
if (stores.isEmpty()) {
throw new AlgebricksException("Configurations don't have any stores");
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/INodeResolver.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/INodeResolver.java
index b0f1ae7..99ffdf1 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/INodeResolver.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/INodeResolver.java
@@ -18,11 +18,13 @@
*/
package org.apache.asterix.external.api;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
/**
* A policy for resolving a name to a node controller id.
*/
+@FunctionalInterface
public interface INodeResolver {
/**
@@ -33,5 +35,5 @@
* @return resolved result (a node controller id)
* @throws AsterixException
*/
- public String resolveNode(String value) throws AsterixException;
+ String resolveNode(ICcApplicationContext appCtx, String value) throws AsterixException;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedEventsListener.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedEventsListener.java
index f49da3c..6f3b667 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedEventsListener.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/management/FeedEventsListener.java
@@ -25,15 +25,15 @@
import java.util.logging.Logger;
import org.apache.asterix.active.ActiveEvent;
-import org.apache.asterix.active.ActiveJobNotificationHandler;
+import org.apache.asterix.active.ActiveLifecycleListener;
import org.apache.asterix.active.ActivityState;
import org.apache.asterix.active.EntityId;
import org.apache.asterix.active.IActiveEventSubscriber;
import org.apache.asterix.active.message.ActivePartitionMessage;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.metadata.IDataset;
import org.apache.asterix.external.feed.watch.FeedEventSubscriber;
import org.apache.asterix.external.feed.watch.NoOpSubscriber;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.JobStatus;
@@ -42,11 +42,14 @@
// constants
private static final Logger LOGGER = Logger.getLogger(FeedEventsListener.class.getName());
// members
+ private final ICcApplicationContext appCtx;
private final String[] sources;
private final List<IActiveEventSubscriber> subscribers;
private int numRegistered;
- public FeedEventsListener(EntityId entityId, List<IDataset> datasets, String[] sources) {
+ public FeedEventsListener(ICcApplicationContext appCtx, EntityId entityId, List<IDataset> datasets,
+ String[] sources) {
+ this.appCtx = appCtx;
this.entityId = entityId;
this.datasets = datasets;
this.sources = sources;
@@ -103,10 +106,11 @@
}
private void finish() throws Exception {
- IHyracksClientConnection hcc = AppContextInfo.INSTANCE.getHcc();
+ IHyracksClientConnection hcc = appCtx.getHcc();
JobStatus status = hcc.getJobStatus(jobId);
state = status.equals(JobStatus.FAILURE) ? ActivityState.FAILED : ActivityState.STOPPED;
- ActiveJobNotificationHandler.INSTANCE.removeListener(this);
+ ActiveLifecycleListener activeLcListener = (ActiveLifecycleListener) appCtx.getActiveLifecycleListener();
+ activeLcListener.getNotificationHandler().removeListener(this);
}
private void start(ActiveEvent event) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
index 3d1297d..b58feba 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
@@ -23,6 +23,7 @@
import java.util.List;
import java.util.Map;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.external.api.AsterixInputStream;
import org.apache.asterix.external.api.IExternalIndexer;
@@ -46,6 +47,8 @@
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.ICCServiceContext;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.hdfs.dataflow.ConfFactory;
@@ -56,6 +59,7 @@
protected static final long serialVersionUID = 1L;
protected transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+ protected transient IServiceContext serviceCtx;
protected String[] readSchedule;
protected boolean read[];
protected InputSplitsFactory inputSplitsFactory;
@@ -75,10 +79,11 @@
private Format format;
@Override
- public void configure(Map<String, String> configuration) throws AsterixException {
+ public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
try {
- init();
+ this.serviceCtx = serviceCtx;
this.configuration = configuration;
+ init((ICCServiceContext) serviceCtx);
JobConf conf = HDFSUtils.configureHDFSJobConf(configuration);
confFactory = new ConfFactory(conf);
clusterLocations = getPartitionConstraint();
@@ -153,7 +158,8 @@
*/
@Override
public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
- clusterLocations = HDFSUtils.getPartitionConstraints(clusterLocations);
+ clusterLocations = HDFSUtils.getPartitionConstraints((IApplicationContext) serviceCtx.getApplicationContext(),
+ clusterLocations);
return clusterLocations;
}
@@ -161,12 +167,12 @@
* This method initialize the scheduler which assigns responsibility of reading different logical input splits from
* HDFS
*/
- private static void init() throws HyracksDataException {
+ private static void init(ICCServiceContext serviceCtx) throws HyracksDataException {
if (!initialized) {
synchronized (initLock) {
if (!initialized) {
- hdfsScheduler = HDFSUtils.initializeHDFSScheduler();
- indexingScheduler = HDFSUtils.initializeIndexingHDFSScheduler();
+ hdfsScheduler = HDFSUtils.initializeHDFSScheduler(serviceCtx);
+ indexingScheduler = HDFSUtils.initializeIndexingHDFSScheduler(serviceCtx);
initialized = true;
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java
index bfcacd8..98f78cc 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSLookupReaderFactory.java
@@ -21,6 +21,7 @@
import java.io.IOException;
import java.util.Map;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.external.api.ILookupReaderFactory;
import org.apache.asterix.external.api.ILookupRecordReader;
@@ -30,6 +31,7 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.hdfs.dataflow.ConfFactory;
@@ -40,6 +42,7 @@
protected ConfFactory confFactory;
protected Map<String, String> configuration;
protected transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+ protected transient IServiceContext serviceCtx;
public HDFSLookupReaderFactory() {
}
@@ -51,12 +54,14 @@
@Override
public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException {
- clusterLocations = HDFSUtils.getPartitionConstraints(clusterLocations);
+ clusterLocations = HDFSUtils.getPartitionConstraints((IApplicationContext) serviceCtx.getApplicationContext(),
+ clusterLocations);
return clusterLocations;
}
@Override
- public void configure(Map<String, String> configuration) throws AsterixException {
+ public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
+ this.serviceCtx = serviceCtx;
this.configuration = configuration;
JobConf conf = HDFSUtils.configureHDFSJobConf(configuration);
try {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java
index 2ded3fb..03200a9 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReaderFactory.java
@@ -23,12 +23,14 @@
import java.util.List;
import java.util.Map;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.external.api.IExternalDataSourceFactory;
import org.apache.asterix.external.api.IRecordReader;
import org.apache.asterix.external.api.IRecordReaderFactory;
import org.apache.asterix.external.util.ExternalDataConstants;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -37,8 +39,9 @@
public class RSSRecordReaderFactory implements IRecordReaderFactory<SyndEntryImpl> {
private static final long serialVersionUID = 1L;
- private final List<String> urls = new ArrayList<String>();
+ private final List<String> urls = new ArrayList<>();
private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+ private transient IServiceContext serviceContext;
@Override
public DataSourceType getDataSourceType() {
@@ -48,12 +51,14 @@
@Override
public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
int count = urls.size();
- clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, count);
+ clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(
+ (IApplicationContext) serviceContext.getApplicationContext(), clusterLocations, count);
return clusterLocations;
}
@Override
- public void configure(Map<String, String> configuration) {
+ public void configure(IServiceContext serviceContext, Map<String, String> configuration) {
+ this.serviceContext = serviceContext;
String url = configuration.get(ExternalDataConstants.KEY_RSS_URL);
if (url == null) {
throw new IllegalArgumentException("no RSS URL provided");
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
index 9b23e38..7d75af8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
@@ -27,6 +27,7 @@
import org.apache.asterix.external.provider.StreamRecordReaderProvider.Format;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -57,9 +58,10 @@
}
@Override
- public void configure(Map<String, String> configuration) throws HyracksDataException, AlgebricksException {
+ public void configure(IServiceContext serviceCtx, Map<String, String> configuration)
+ throws HyracksDataException, AlgebricksException {
this.configuration = configuration;
- streamFactory.configure(configuration);
+ streamFactory.configure(serviceCtx, configuration);
format = StreamRecordReaderProvider.getReaderFormat(configuration);
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
index 4d8be98..6ff0b6c 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
@@ -22,6 +22,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.external.api.IExternalDataSourceFactory;
import org.apache.asterix.external.api.IRecordReader;
@@ -32,6 +33,7 @@
import org.apache.asterix.external.util.TwitterUtil.SearchAPIConstants;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -47,6 +49,7 @@
private Map<String, String> configuration;
private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+ private transient IServiceContext serviceCtx;
public static boolean isTwitterPull(Map<String, String> configuration) {
String reader = configuration.get(ExternalDataConstants.KEY_READER);
@@ -64,13 +67,16 @@
@Override
public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
- clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, INTAKE_CARDINALITY);
+ clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(
+ (IApplicationContext) serviceCtx.getApplicationContext(),
+ clusterLocations, INTAKE_CARDINALITY);
return clusterLocations;
}
@Override
- public void configure(Map<String, String> configuration) throws AsterixException {
+ public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
this.configuration = configuration;
+ this.serviceCtx = serviceCtx;
TwitterUtil.initializeConfigurationWithAuthInfo(configuration);
if (!validateConfiguration(configuration)) {
StringBuilder builder = new StringBuilder();
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
index a2e3704..44b0b43 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
@@ -26,6 +26,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.external.api.AsterixInputStream;
import org.apache.asterix.external.api.IInputStreamFactory;
@@ -37,6 +38,7 @@
import org.apache.asterix.external.util.FileSystemWatcher;
import org.apache.asterix.external.util.NodeResolverFactory;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.UnmanagedFileSplit;
@@ -83,10 +85,12 @@
}
@Override
- public void configure(Map<String, String> configuration) throws AsterixException {
+ public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
this.configuration = configuration;
String[] splits = configuration.get(ExternalDataConstants.KEY_PATH).split(",");
- configureFileSplits(splits);
+ if (inputFileSplits == null) {
+ configureFileSplits((ICcApplicationContext) serviceCtx.getApplicationContext(), splits);
+ }
configurePartitionConstraint();
this.isFeed = ExternalDataUtils.isFeed(configuration) && ExternalDataUtils.keepDataSourceOpen(configuration);
this.expression = configuration.get(ExternalDataConstants.KEY_EXPRESSION);
@@ -97,25 +101,24 @@
return constraints;
}
- private void configureFileSplits(String[] splits) throws AsterixException {
+ private void configureFileSplits(ICcApplicationContext appCtx, String[] splits) throws AsterixException {
INodeResolver resolver = getNodeResolver();
- if (inputFileSplits == null) {
- inputFileSplits = new UnmanagedFileSplit[splits.length];
- String node;
- String path;
- int count = 0;
- String trimmedValue;
- for (String splitPath : splits) {
- trimmedValue = splitPath.trim();
- if (!trimmedValue.contains("://")) {
- throw new AsterixException(
- "Invalid path: " + splitPath + "\nUsage- path=\"Host://Absolute File Path\"");
- }
- node = resolver.resolveNode(trimmedValue.split(":")[0]);
- path = trimmedValue.split("://")[1];
- inputFileSplits[count++] = new UnmanagedFileSplit(node, path);
+ inputFileSplits = new UnmanagedFileSplit[splits.length];
+ String node;
+ String path;
+ int count = 0;
+ String trimmedValue;
+ for (String splitPath : splits) {
+ trimmedValue = splitPath.trim();
+ if (!trimmedValue.contains("://")) {
+ throw new AsterixException(
+ "Invalid path: " + splitPath + "\nUsage- path=\"Host://Absolute File Path\"");
}
+ node = resolver.resolveNode(appCtx, trimmedValue.split(":")[0]);
+ path = trimmedValue.split("://")[1];
+ inputFileSplits[count++] = new UnmanagedFileSplit(node, path);
}
+
}
private void configurePartitionConstraint() throws AsterixException {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamFactory.java
index 8ab8ead..9a0e718 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketClientInputStreamFactory.java
@@ -25,6 +25,7 @@
import java.util.List;
import java.util.Map;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.external.api.AsterixInputStream;
import org.apache.asterix.external.api.IExternalDataSourceFactory;
@@ -35,25 +36,29 @@
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
public class SocketClientInputStreamFactory implements IInputStreamFactory {
private static final long serialVersionUID = 1L;
+ private transient IServiceContext serviceCtx;
private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
private List<Pair<String, Integer>> sockets;
@Override
public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
- clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, sockets.size());
+ clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(
+ (IApplicationContext) serviceCtx.getApplicationContext(), clusterLocations, sockets.size());
return clusterLocations;
}
@Override
- public void configure(Map<String, String> configuration) throws AsterixException {
+ public void configure(IServiceContext serviceCtx, Map<String, String> configuration) throws AsterixException {
try {
- this.sockets = new ArrayList<Pair<String, Integer>>();
+ this.serviceCtx = serviceCtx;
+ this.sockets = new ArrayList<>();
String socketsValue = configuration.get(ExternalDataConstants.KEY_SOCKETS);
if (socketsValue == null) {
throw new IllegalArgumentException(
@@ -66,7 +71,7 @@
int port = Integer.parseInt(socketTokens[1].trim());
InetAddress[] resolved;
resolved = SystemDefaultDnsResolver.INSTANCE.resolve(host);
- Pair<String, Integer> p = new Pair<String, Integer>(resolved[0].getHostAddress(), port);
+ Pair<String, Integer> p = new Pair<>(resolved[0].getHostAddress(), port);
sockets.add(p);
}
} catch (UnknownHostException e) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamFactory.java
index f8aac81..05931b2 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/SocketServerInputStreamFactory.java
@@ -29,6 +29,7 @@
import java.util.Random;
import java.util.Set;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
@@ -39,8 +40,8 @@
import org.apache.asterix.runtime.utils.RuntimeUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -56,9 +57,10 @@
}
@Override
- public void configure(Map<String, String> configuration) throws AlgebricksException {
+ public void configure(IServiceContext serviceCtx, Map<String, String> configuration)
+ throws AsterixException, CompilationException {
try {
- sockets = new ArrayList<Pair<String, Integer>>();
+ sockets = new ArrayList<>();
String modeValue = configuration.get(ExternalDataConstants.KEY_MODE);
if (modeValue != null) {
mode = Mode.valueOf(modeValue.trim().toUpperCase());
@@ -68,8 +70,9 @@
throw new CompilationException(ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_NOT_PROPERLY_CONFIGURED);
}
Map<InetAddress, Set<String>> ncMap;
- ncMap = RuntimeUtils.getNodeControllerMap();
- List<String> ncs = RuntimeUtils.getAllNodeControllers();
+ ncMap = RuntimeUtils.getNodeControllerMap((ICcApplicationContext) serviceCtx.getApplicationContext());
+ List<String> ncs =
+ RuntimeUtils.getAllNodeControllers((ICcApplicationContext) serviceCtx.getApplicationContext());
String[] socketsArray = socketsValue.split(",");
Random random = new Random();
for (String socket : socketsArray) {
@@ -87,11 +90,11 @@
}
String[] ncArray = ncsOnIp.toArray(new String[] {});
String nc = ncArray[random.nextInt(ncArray.length)];
- p = new Pair<String, Integer>(nc, port);
+ p = new Pair<>(nc, port);
break;
case NC:
- p = new Pair<String, Integer>(host, port);
+ p = new Pair<>(host, port);
if (!ncs.contains(host)) {
throw new CompilationException(
ErrorCode.FEED_METADATA_SOCKET_ADAPTOR_SOCKET_INVALID_HOST_NC, "NC", host,
@@ -127,7 +130,7 @@
@Override
public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
- List<String> locations = new ArrayList<String>();
+ List<String> locations = new ArrayList<>();
for (Pair<String, Integer> socket : sockets) {
locations.add(socket.first);
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamFactory.java
index 936f1f8..b32006c 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamFactory.java
@@ -28,6 +28,7 @@
import org.apache.asterix.external.input.stream.TwitterFirehoseInputStream;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -82,7 +83,7 @@
}
@Override
- public void configure(Map<String, String> configuration) {
+ public void configure(IServiceContext serviceCtx, Map<String, String> configuration) {
this.configuration = configuration;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
index 866fd9c..493a0bf 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
@@ -20,7 +20,7 @@
import java.io.IOException;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.common.library.ILibraryManager;
@@ -32,7 +32,6 @@
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.EnumDeserializer;
import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
@@ -56,7 +55,8 @@
protected final IScalarEvaluator[] argumentEvaluators;
protected final JavaFunctionHelper functionHelper;
- public ExternalFunction(IExternalFunctionInfo finfo, IScalarEvaluatorFactory args[], IHyracksTaskContext context)
+ public ExternalFunction(IExternalFunctionInfo finfo, IScalarEvaluatorFactory args[], IHyracksTaskContext context,
+ IApplicationContext appCtx)
throws HyracksDataException {
this.finfo = finfo;
this.evaluatorFactories = args;
@@ -69,16 +69,7 @@
String[] fnameComponents = finfo.getFunctionIdentifier().getName().split("#");
String functionLibary = fnameComponents[0];
String dataverse = finfo.getFunctionIdentifier().getNamespace();
- ILibraryManager libraryManager;
- if (context == null) {
- // Gets the library manager for compile-time constant folding.
- libraryManager = AppContextInfo.INSTANCE.getLibraryManager();
- } else {
- // Gets the library manager for real runtime evaluation.
- IAppRuntimeContext runtimeCtx = (IAppRuntimeContext) context.getJobletContext()
- .getServiceContext().getApplicationContext();
- libraryManager = runtimeCtx.getLibraryManager();
- }
+ ILibraryManager libraryManager = appCtx.getLibraryManager();
ClassLoader libraryClassLoader = libraryManager.getLibraryClassLoader(dataverse, functionLibary);
String classname = finfo.getFunctionBody().trim();
Class<?> clazz;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java
index 5c27561..2e9e231 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionDescriptorProvider.java
@@ -18,6 +18,7 @@
*/
package org.apache.asterix.external.library;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.om.functions.IExternalFunctionInfo;
import org.apache.asterix.om.functions.IFunctionDescriptor;
import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
@@ -28,11 +29,11 @@
public class ExternalFunctionDescriptorProvider {
- public static IFunctionDescriptor getExternalFunctionDescriptor(IExternalFunctionInfo finfo)
- throws AlgebricksException {
+ public static IFunctionDescriptor getExternalFunctionDescriptor(IExternalFunctionInfo finfo,
+ IApplicationContext appCtx) throws AlgebricksException {
switch (finfo.getKind()) {
case SCALAR:
- return new ExternalScalarFunctionDescriptor(finfo);
+ return new ExternalScalarFunctionDescriptor(finfo, appCtx);
case AGGREGATE:
case UNNEST:
throw new AlgebricksException("Unsupported function kind :" + finfo.getKind());
@@ -48,10 +49,16 @@
private static final long serialVersionUID = 1L;
private final IFunctionInfo finfo;
private IScalarEvaluatorFactory evaluatorFactory;
+ private final transient IApplicationContext appCtx;
+
+ public ExternalScalarFunctionDescriptor(IFunctionInfo finfo, IApplicationContext appCtx) {
+ this.finfo = finfo;
+ this.appCtx = appCtx;
+ }
@Override
public IScalarEvaluatorFactory createEvaluatorFactory(IScalarEvaluatorFactory[] args) throws AlgebricksException {
- evaluatorFactory = new ExternalScalarFunctionEvaluatorFactory((IExternalFunctionInfo) finfo, args);
+ evaluatorFactory = new ExternalScalarFunctionEvaluatorFactory((IExternalFunctionInfo) finfo, args, appCtx);
return evaluatorFactory;
}
@@ -60,8 +67,4 @@
return finfo.getFunctionIdentifier();
}
- public ExternalScalarFunctionDescriptor(IFunctionInfo finfo) {
- this.finfo = finfo;
- }
-
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
index 0e80e4b..7602b9a 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
@@ -18,6 +18,7 @@
*/
package org.apache.asterix.external.library;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.external.api.IExternalFunction;
@@ -35,14 +36,14 @@
public class ExternalFunctionProvider {
public static IExternalFunction getExternalFunctionEvaluator(IExternalFunctionInfo finfo,
- IScalarEvaluatorFactory args[], IHyracksTaskContext context) throws HyracksDataException {
+ IScalarEvaluatorFactory args[], IHyracksTaskContext context, IApplicationContext appCtx)
+ throws HyracksDataException {
switch (finfo.getKind()) {
case SCALAR:
- return new ExternalScalarFunction(finfo, args, context);
+ return new ExternalScalarFunction(finfo, args, context, appCtx);
case AGGREGATE:
case UNNEST:
- throw new RuntimeDataException(ErrorCode.LIBRARY_EXTERNAL_FUNCTION_UNSUPPORTED_KIND,
- finfo.getKind());
+ throw new RuntimeDataException(ErrorCode.LIBRARY_EXTERNAL_FUNCTION_UNSUPPORTED_KIND, finfo.getKind());
default:
throw new RuntimeDataException(ErrorCode.LIBRARY_EXTERNAL_FUNCTION_UNKNOWN_KIND, finfo.getKind());
}
@@ -52,8 +53,8 @@
class ExternalScalarFunction extends ExternalFunction implements IExternalScalarFunction, IScalarEvaluator {
public ExternalScalarFunction(IExternalFunctionInfo finfo, IScalarEvaluatorFactory args[],
- IHyracksTaskContext context) throws HyracksDataException {
- super(finfo, args, context);
+ IHyracksTaskContext context, IApplicationContext appCtx) throws HyracksDataException {
+ super(finfo, args, context, appCtx);
try {
initialize(functionHelper);
} catch (Exception e) {
@@ -78,11 +79,11 @@
try {
resultBuffer.reset();
((IExternalScalarFunction) externalFunction).evaluate(argumentProvider);
- /*
- * Make sure that if "setResult" is not called,
- * or the result object is missing we let Hyracks storage manager know
- * we want to discard a missing object
- */
+ /*
+ * Make sure that if "setResult" is not called,
+ * or the result object is missing we let Hyracks storage manager know
+ * we want to discard a missing object
+ */
byte byteOutput = resultBuffer.getByteArray()[0];
if (!argumentProvider.isValidResult() || byteOutput == ATypeTag.SERIALIZED_MISSING_TYPE_TAG) {
resultBuffer.getDataOutput().writeByte(ATypeTag.SERIALIZED_MISSING_TYPE_TAG);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java
index 99d40c7..8208013 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java
@@ -18,6 +18,7 @@
*/
package org.apache.asterix.external.library;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.om.functions.IExternalFunctionInfo;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
@@ -30,16 +31,21 @@
private static final long serialVersionUID = 1L;
private final IExternalFunctionInfo finfo;
private final IScalarEvaluatorFactory[] args;
+ private final transient IApplicationContext appCtx;
- public ExternalScalarFunctionEvaluatorFactory(IExternalFunctionInfo finfo, IScalarEvaluatorFactory[] args)
- throws AlgebricksException {
+ public ExternalScalarFunctionEvaluatorFactory(IExternalFunctionInfo finfo, IScalarEvaluatorFactory[] args,
+ IApplicationContext appCtx) throws AlgebricksException {
this.finfo = finfo;
this.args = args;
+ this.appCtx = appCtx;
}
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
- return (ExternalScalarFunction) ExternalFunctionProvider.getExternalFunctionEvaluator(finfo, args, ctx);
+ return (ExternalScalarFunction) ExternalFunctionProvider.getExternalFunctionEvaluator(finfo, args, ctx,
+ appCtx == null
+ ? (IApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext()
+ : appCtx);
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/RuntimeExternalFunctionUtil.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/RuntimeExternalFunctionUtil.java
index 33d508e..8cd157c 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/RuntimeExternalFunctionUtil.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/RuntimeExternalFunctionUtil.java
@@ -22,6 +22,7 @@
import java.util.HashMap;
import java.util.Map;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.om.base.AMutableInt32;
@@ -35,7 +36,7 @@
public class RuntimeExternalFunctionUtil {
- private static Map<String, ClassLoader> libraryClassLoaders = new HashMap<String, ClassLoader>();
+ private static Map<String, ClassLoader> libraryClassLoaders = new HashMap<>();
public static void registerLibraryClassLoader(String dataverseName, String libraryName, ClassLoader classLoader)
throws RuntimeDataException {
@@ -55,10 +56,11 @@
}
}
- public static IFunctionDescriptor getFunctionDescriptor(IFunctionInfo finfo) throws RuntimeDataException {
+ public static IFunctionDescriptor getFunctionDescriptor(IFunctionInfo finfo, IApplicationContext appCtx)
+ throws RuntimeDataException {
switch (((IExternalFunctionInfo) finfo).getKind()) {
case SCALAR:
- return getScalarFunctionDescriptor(finfo);
+ return getScalarFunctionDescriptor(finfo, appCtx);
case AGGREGATE:
case UNNEST:
case STATEFUL:
@@ -68,8 +70,9 @@
return null;
}
- private static AbstractScalarFunctionDynamicDescriptor getScalarFunctionDescriptor(IFunctionInfo finfo) {
- return new ExternalScalarFunctionDescriptor(finfo);
+ private static AbstractScalarFunctionDynamicDescriptor getScalarFunctionDescriptor(IFunctionInfo finfo,
+ IApplicationContext appCtx) {
+ return new ExternalScalarFunctionDescriptor(finfo, appCtx);
}
public static ByteBuffer allocateArgumentBuffers(IAType type) {
@@ -91,15 +94,15 @@
return new AMutableString("");
default:
return null;
- /*
- ARecordType recordType = (ARecordType) type;
- IAType[] fieldTypes = recordType.getFieldTypes();
- IAObject[] fields = new IAObject[fieldTypes.length];
- for (int i = 0; i < fields.length; i++) {
- fields[i] = allocateArgumentObjects(fieldTypes[i]);
- }
- return new AMutableRecord((ARecordType) type, fields);
- */
+ /*
+ ARecordType recordType = (ARecordType) type;
+ IAType[] fieldTypes = recordType.getFieldTypes();
+ IAObject[] fields = new IAObject[fieldTypes.length];
+ for (int i = 0; i < fields.length; i++) {
+ fields[i] = allocateArgumentObjects(fieldTypes[i]);
+ }
+ return new AMutableRecord((ARecordType) type, fields);
+ */
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java
index f289361..121d262 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorNodePushable.java
@@ -23,7 +23,7 @@
import org.apache.asterix.active.ActiveManager;
import org.apache.asterix.active.ActiveRuntimeId;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
import org.apache.asterix.external.feed.dataflow.SyncFeedRuntimeInputHandler;
import org.apache.asterix.external.feed.management.FeedConnectionId;
@@ -51,7 +51,7 @@
this.partition = partition;
this.connectionId = feedConnectionId;
this.policyAccessor = new FeedPolicyAccessor(feedPolicy);
- this.activeManager = (ActiveManager) ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext()
+ this.activeManager = (ActiveManager) ((INcApplicationContext) ctx.getJobletContext().getServiceContext()
.getApplicationContext()).getActiveManager();
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
index b911bf1..1dce8ee 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedIntakeOperatorDescriptor.java
@@ -22,7 +22,7 @@
import java.util.logging.Logger;
import org.apache.asterix.active.EntityId;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.common.library.ILibraryManager;
@@ -102,22 +102,22 @@
private IAdapterFactory createExternalAdapterFactory(IHyracksTaskContext ctx) throws HyracksDataException {
IAdapterFactory adapterFactory;
- IAppRuntimeContext runtimeCtx = (IAppRuntimeContext) ctx.getJobletContext()
- .getServiceContext().getApplicationContext();
+ INcApplicationContext runtimeCtx =
+ (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
ILibraryManager libraryManager = runtimeCtx.getLibraryManager();
ClassLoader classLoader = libraryManager.getLibraryClassLoader(feedId.getDataverse(), adaptorLibraryName);
if (classLoader != null) {
try {
adapterFactory = (IAdapterFactory) (classLoader.loadClass(adaptorFactoryClassName).newInstance());
adapterFactory.setOutputType(adapterOutputType);
- adapterFactory.configure(libraryManager, adaptorConfiguration);
+ adapterFactory.configure(ctx.getJobletContext().getServiceContext(), adaptorConfiguration);
} catch (Exception e) {
throw new HyracksDataException(e);
}
} else {
RuntimeDataException err = new RuntimeDataException(
- ErrorCode.OPERATORS_FEED_INTAKE_OPERATOR_DESCRIPTOR_CLASSLOADER_NOT_CONFIGURED,
- adaptorLibraryName, feedId.getDataverse());
+ ErrorCode.OPERATORS_FEED_INTAKE_OPERATOR_DESCRIPTOR_CLASSLOADER_NOT_CONFIGURED, adaptorLibraryName,
+ feedId.getDataverse());
LOGGER.severe(err.getMessage());
throw err;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
index 6732b15..6acaefb 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
@@ -25,7 +25,7 @@
import org.apache.asterix.active.ActiveManager;
import org.apache.asterix.active.ActiveRuntimeId;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
import org.apache.asterix.external.feed.dataflow.SyncFeedRuntimeInputHandler;
import org.apache.asterix.external.feed.management.FeedConnectionId;
@@ -103,7 +103,7 @@
this.policyAccessor = new FeedPolicyAccessor(feedPolicyProperties);
this.partition = partition;
this.connectionId = feedConnectionId;
- this.feedManager = (ActiveManager) ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext()
+ this.feedManager = (ActiveManager) ((INcApplicationContext) ctx.getJobletContext().getServiceContext()
.getApplicationContext()).getActiveManager();
this.message = new VSizeFrame(ctx);
TaskUtil.putInSharedMap(HyracksConstants.KEY_MESSAGE, message, ctx);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
index f2193af..87b92c2 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
@@ -25,7 +25,7 @@
import org.apache.asterix.active.ActiveManager;
import org.apache.asterix.active.ActiveRuntimeId;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.dataflow.LSMInsertDeleteOperatorNodePushable;
import org.apache.asterix.external.feed.dataflow.FeedRuntimeInputHandler;
import org.apache.asterix.external.feed.dataflow.SyncFeedRuntimeInputHandler;
@@ -97,7 +97,7 @@
this.policyAccessor = new FeedPolicyAccessor(feedPolicyProperties);
this.partition = partition;
this.connectionId = feedConnectionId;
- this.feedManager = (ActiveManager) ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext()
+ this.feedManager = (ActiveManager) ((INcApplicationContext) ctx.getJobletContext().getServiceContext()
.getApplicationContext()).getActiveManager();
this.targetId = targetId;
this.message = new VSizeFrame(ctx);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
index 059de63..d6ac5d1 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
@@ -21,7 +21,6 @@
import java.util.List;
import java.util.Map;
-import org.apache.asterix.common.library.ILibraryManager;
import org.apache.asterix.external.adapter.factory.GenericAdapterFactory;
import org.apache.asterix.external.adapter.factory.LookupAdapterFactory;
import org.apache.asterix.external.api.IAdapterFactory;
@@ -30,6 +29,7 @@
import org.apache.asterix.external.util.ExternalDataCompatibilityUtils;
import org.apache.asterix.om.types.ARecordType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.dataflow.value.IMissingWriterFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -39,19 +39,19 @@
public class AdapterFactoryProvider {
// Adapters
- public static IAdapterFactory getAdapterFactory(ILibraryManager libraryManager, String adapterName,
+ public static IAdapterFactory getAdapterFactory(IServiceContext serviceCtx, String adapterName,
Map<String, String> configuration, ARecordType itemType, ARecordType metaType)
throws HyracksDataException, AlgebricksException {
ExternalDataCompatibilityUtils.prepare(adapterName, configuration);
GenericAdapterFactory adapterFactory = new GenericAdapterFactory();
adapterFactory.setOutputType(itemType);
adapterFactory.setMetaType(metaType);
- adapterFactory.configure(libraryManager, configuration);
+ adapterFactory.configure(serviceCtx, configuration);
return adapterFactory;
}
// Indexing Adapters
- public static IIndexingAdapterFactory getIndexingAdapterFactory(ILibraryManager libraryManager, String adapterName,
+ public static IIndexingAdapterFactory getIndexingAdapterFactory(IServiceContext serviceCtx, String adapterName,
Map<String, String> configuration, ARecordType itemType, List<ExternalFile> snapshot, boolean indexingOp,
ARecordType metaType) throws HyracksDataException, AlgebricksException {
ExternalDataCompatibilityUtils.prepare(adapterName, configuration);
@@ -59,18 +59,18 @@
adapterFactory.setOutputType(itemType);
adapterFactory.setMetaType(metaType);
adapterFactory.setSnapshot(snapshot, indexingOp);
- adapterFactory.configure(libraryManager, configuration);
+ adapterFactory.configure(serviceCtx, configuration);
return adapterFactory;
}
// Lookup Adapters
- public static LookupAdapterFactory<?> getLookupAdapterFactory(ILibraryManager libraryManager,
+ public static LookupAdapterFactory<?> getLookupAdapterFactory(IServiceContext serviceCtx,
Map<String, String> configuration, ARecordType recordType, int[] ridFields, boolean retainInput,
boolean retainMissing, IMissingWriterFactory missingWriterFactory)
throws HyracksDataException, AlgebricksException {
LookupAdapterFactory<?> adapterFactory = new LookupAdapterFactory<>(recordType, ridFields, retainInput,
retainMissing, missingWriterFactory);
- adapterFactory.configure(libraryManager, configuration);
+ adapterFactory.configure(serviceCtx, configuration);
return adapterFactory;
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java
index a131ae7..69f619f 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/LookupReaderFactoryProvider.java
@@ -25,18 +25,19 @@
import org.apache.asterix.external.input.record.reader.hdfs.HDFSLookupReaderFactory;
import org.apache.asterix.external.util.ExternalDataConstants;
import org.apache.asterix.external.util.HDFSUtils;
+import org.apache.hyracks.api.application.IServiceContext;
public class LookupReaderFactoryProvider {
@SuppressWarnings("rawtypes")
- public static ILookupReaderFactory getLookupReaderFactory(Map<String, String> configuration)
- throws AsterixException {
+ public static ILookupReaderFactory getLookupReaderFactory(IServiceContext serviceCtx,
+ Map<String, String> configuration) throws AsterixException {
String inputFormat = HDFSUtils.getInputFormatClassName(configuration);
if (inputFormat.equals(ExternalDataConstants.CLASS_NAME_TEXT_INPUT_FORMAT)
|| inputFormat.equals(ExternalDataConstants.CLASS_NAME_SEQUENCE_INPUT_FORMAT)
|| inputFormat.equals(ExternalDataConstants.CLASS_NAME_RC_INPUT_FORMAT)) {
- HDFSLookupReaderFactory<Object> readerFactory = new HDFSLookupReaderFactory<Object>();
- readerFactory.configure(configuration);
+ HDFSLookupReaderFactory<Object> readerFactory = new HDFSLookupReaderFactory<>();
+ readerFactory.configure(serviceCtx, configuration);
return readerFactory;
} else {
throw new AsterixException("Unrecognized external format");
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
index def3ee2..b4353e7 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/HDFSUtils.java
@@ -24,6 +24,7 @@
import java.util.List;
import java.util.Map;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
@@ -31,19 +32,19 @@
import org.apache.asterix.external.indexing.IndexingScheduler;
import org.apache.asterix.external.indexing.RecordId.RecordIdType;
import org.apache.asterix.external.input.stream.HDFSInputStream;
+import org.apache.asterix.hivecompat.io.RCFileInputFormat;
+import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.asterix.hivecompat.io.RCFileInputFormat;
-import org.apache.asterix.runtime.utils.AppContextInfo;
-import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.context.ICCContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.HyracksException;
@@ -51,8 +52,8 @@
public class HDFSUtils {
- public static Scheduler initializeHDFSScheduler() throws HyracksDataException {
- ICCContext ccContext = AppContextInfo.INSTANCE.getCCServiceContext().getCCContext();
+ public static Scheduler initializeHDFSScheduler(ICCServiceContext serviceCtx) throws HyracksDataException {
+ ICCContext ccContext = serviceCtx.getCCContext();
Scheduler scheduler = null;
try {
scheduler = new Scheduler(ccContext.getClusterControllerInfo().getClientNetAddress(),
@@ -63,8 +64,9 @@
return scheduler;
}
- public static IndexingScheduler initializeIndexingHDFSScheduler() throws HyracksDataException {
- ICCContext ccContext = AppContextInfo.INSTANCE.getCCServiceContext().getCCContext();
+ public static IndexingScheduler initializeIndexingHDFSScheduler(ICCServiceContext serviceCtx)
+ throws HyracksDataException {
+ ICCContext ccContext = serviceCtx.getCCContext();
IndexingScheduler scheduler = null;
try {
scheduler = new IndexingScheduler(ccContext.getClusterControllerInfo().getClientNetAddress(),
@@ -90,8 +92,8 @@
public static InputSplit[] getSplits(JobConf conf, List<ExternalFile> files) throws IOException {
// Create file system object
FileSystem fs = FileSystem.get(conf);
- ArrayList<FileSplit> fileSplits = new ArrayList<FileSplit>();
- ArrayList<ExternalFile> orderedExternalFiles = new ArrayList<ExternalFile>();
+ ArrayList<FileSplit> fileSplits = new ArrayList<>();
+ ArrayList<ExternalFile> orderedExternalFiles = new ArrayList<>();
// Create files splits
for (ExternalFile file : files) {
Path filePath = new Path(file.getFileName());
@@ -202,11 +204,11 @@
return conf;
}
- public static AlgebricksAbsolutePartitionConstraint getPartitionConstraints(
+ public static AlgebricksAbsolutePartitionConstraint getPartitionConstraints(IApplicationContext appCtx,
AlgebricksAbsolutePartitionConstraint clusterLocations) {
if (clusterLocations == null) {
ArrayList<String> locs = new ArrayList<>();
- Map<String, String[]> stores = AppContextInfo.INSTANCE.getMetadataProperties().getStores();
+ Map<String, String[]> stores = appCtx.getMetadataProperties().getStores();
for (String node : stores.keySet()) {
int numIODevices = ClusterStateManager.INSTANCE.getIODevices(node).length;
for (int k = 0; k < numIODevices; k++) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/IdentityResolver.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/IdentityResolver.java
index bda5f1e..9a4ddff 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/IdentityResolver.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/IdentityResolver.java
@@ -18,6 +18,7 @@
*/
package org.apache.asterix.external.util;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.external.api.INodeResolver;
/**
@@ -27,7 +28,7 @@
public class IdentityResolver implements INodeResolver {
@Override
- public String resolveNode(String value) {
+ public String resolveNode(ICcApplicationContext appCtx, String value) {
return value;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolver.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolver.java
index 84346c4..5b15e9e 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolver.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/NodeResolver.java
@@ -27,6 +27,7 @@
import java.util.Random;
import java.util.Set;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.external.api.INodeResolver;
@@ -42,15 +43,15 @@
private static final Set<String> ncs = new HashSet<>();
@Override
- public String resolveNode(String value) throws AsterixException {
+ public String resolveNode(ICcApplicationContext appCtx, String value) throws AsterixException {
try {
if (ncMap.isEmpty()) {
- NodeResolver.updateNCs();
+ NodeResolver.updateNCs(appCtx);
}
if (ncs.contains(value)) {
return value;
} else {
- NodeResolver.updateNCs();
+ NodeResolver.updateNCs(appCtx);
if (ncs.contains(value)) {
return value;
}
@@ -71,10 +72,10 @@
}
}
- private static void updateNCs() throws Exception {
+ private static void updateNCs(ICcApplicationContext appCtx) throws Exception {
synchronized (ncMap) {
ncMap.clear();
- RuntimeUtils.getNodeControllerMap(ncMap);
+ RuntimeUtils.getNodeControllerMap(appCtx, ncMap);
synchronized (ncs) {
ncs.clear();
for (Entry<InetAddress, Set<String>> entry : ncMap.entrySet()) {
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java
index 3467411..12652f3 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/RecordWithPKTestReaderFactory.java
@@ -20,27 +20,32 @@
import java.util.Map;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.external.api.IExternalDataSourceFactory;
import org.apache.asterix.external.api.IRecordReader;
import org.apache.asterix.external.api.IRecordReaderFactory;
import org.apache.asterix.external.input.record.RecordWithPK;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
public class RecordWithPKTestReaderFactory implements IRecordReaderFactory<RecordWithPK<char[]>> {
private static final long serialVersionUID = 1L;
private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+ private transient IServiceContext serviceCtx;
@Override
public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
- clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, 1);
+ clusterLocations = IExternalDataSourceFactory
+ .getPartitionConstraints((IApplicationContext) serviceCtx.getApplicationContext(), clusterLocations, 1);
return clusterLocations;
}
@Override
- public void configure(final Map<String, String> configuration) {
+ public void configure(IServiceContext serviceCtx, final Map<String, String> configuration) {
+ this.serviceCtx = serviceCtx;
}
@Override
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java
index 98105b2..d2e0281 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/kv/KVTestReaderFactory.java
@@ -24,6 +24,7 @@
import org.apache.asterix.external.api.IRecordReaderFactory;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import com.couchbase.client.core.message.dcp.DCPRequest;
@@ -48,7 +49,7 @@
}
@Override
- public void configure(final Map<String, String> configuration) {
+ public void configure(IServiceContext serviceCtx, final Map<String, String> configuration) {
if (configuration.containsKey("num-of-records")) {
numOfRecords = Integer.parseInt(configuration.get("num-of-records"));
}
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
index a108908..1c28940 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
@@ -22,9 +22,8 @@
import java.io.InputStream;
import java.util.Map;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.cluster.ClusterPartition;
-import org.apache.asterix.common.config.IPropertiesProvider;
-import org.apache.asterix.common.library.ILibraryManager;
import org.apache.asterix.external.api.IAdapterFactory;
import org.apache.asterix.external.api.IDataSourceAdapter;
import org.apache.asterix.external.api.IExternalDataSourceFactory;
@@ -36,6 +35,7 @@
import org.apache.asterix.om.types.ARecordType;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.application.IServiceContext;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -55,6 +55,8 @@
private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
+ private transient IServiceContext serviceContext;
+
@Override
public String getAlias() {
return "test_typed";
@@ -62,7 +64,8 @@
@Override
public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AlgebricksException {
- clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, 1);
+ clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(
+ (IApplicationContext) serviceContext.getApplicationContext(), clusterLocations, 1);
return clusterLocations;
}
@@ -77,10 +80,9 @@
ADMDataParser parser;
ITupleForwarder forwarder;
ArrayTupleBuilder tb;
- IPropertiesProvider propertiesProvider =
- (IPropertiesProvider) ctx.getJobletContext().getServiceContext().getApplicationContext();
- ClusterPartition nodePartition =
- propertiesProvider.getMetadataProperties().getNodePartitions().get(nodeId)[0];
+ IApplicationContext appCtx =
+ (IApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+ ClusterPartition nodePartition = appCtx.getMetadataProperties().getNodePartitions().get(nodeId)[0];
parser = new ADMDataParser(outputType, true);
forwarder = DataflowUtils.getTupleForwarder(configuration,
FeedUtils.getFeedLogManager(ctx,
@@ -118,7 +120,8 @@
}
@Override
- public void configure(ILibraryManager context, Map<String, String> configuration) {
+ public void configure(IServiceContext serviceContext, Map<String, String> configuration) {
+ this.serviceContext = serviceContext;
this.configuration = configuration;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
index 8080fcb..2f30ff1 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/MetadataNode.java
@@ -28,11 +28,10 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.api.IAppRuntimeContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.DatasetConfig.IndexType;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.dataflow.LSMIndexUtil;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.functions.FunctionSignature;
@@ -141,13 +140,12 @@
super();
}
- public void initialize(IAppRuntimeContext runtimeContext, MetadataTupleTranslatorProvider tupleTranslatorProvider,
- List<IMetadataExtension> metadataExtensions) {
+ public void initialize(INcApplicationContext runtimeContext,
+ MetadataTupleTranslatorProvider tupleTranslatorProvider, List<IMetadataExtension> metadataExtensions) {
this.tupleTranslatorProvider = tupleTranslatorProvider;
this.transactionSubsystem = runtimeContext.getTransactionSubsystem();
this.datasetLifecycleManager = runtimeContext.getDatasetLifecycleManager();
- this.metadataStoragePartition =
- ((IPropertiesProvider) runtimeContext).getMetadataProperties().getMetadataPartition().getPartitionId();
+ this.metadataStoragePartition = runtimeContext.getMetadataProperties().getMetadataPartition().getPartitionId();
if (metadataExtensions != null) {
extensionDatasets = new HashMap<>();
for (IMetadataExtension metadataExtension : metadataExtensions) {
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IClusterManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IClusterManager.java
index 9ead1ee..4acd673 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IClusterManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/api/IClusterManager.java
@@ -21,6 +21,7 @@
import java.util.Set;
import org.apache.asterix.common.api.IClusterEventsSubscriber;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.event.schema.cluster.Node;
@@ -30,7 +31,7 @@
* @param node
* @throws AsterixException
*/
- public void addNode(Node node) throws AsterixException;
+ public void addNode(ICcApplicationContext appCtx, Node node) throws AsterixException;
/**
* @param node
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
index 15a2783..3170a68 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -26,7 +26,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.cluster.ClusterPartition;
import org.apache.asterix.common.config.ClusterProperties;
@@ -106,7 +106,7 @@
public class MetadataBootstrap {
public static final boolean IS_DEBUG_MODE = false;
private static final Logger LOGGER = Logger.getLogger(MetadataBootstrap.class.getName());
- private static IAppRuntimeContext appContext;
+ private static INcApplicationContext appContext;
private static IBufferCache bufferCache;
private static IFileMapProvider fileMapProvider;
private static IDatasetLifecycleManager dataLifecycleManager;
@@ -142,7 +142,7 @@
public static void startUniverse(INCServiceContext ncServiceContext, boolean isNewUniverse)
throws RemoteException, ACIDException, MetadataException {
MetadataBootstrap.setNewUniverse(isNewUniverse);
- appContext = (IAppRuntimeContext) ncServiceContext.getApplicationContext();
+ appContext = (INcApplicationContext) ncServiceContext.getApplicationContext();
MetadataProperties metadataProperties = appContext.getMetadataProperties();
metadataNodeName = metadataProperties.getMetadataNodeName();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManager.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManager.java
index 5bfe876..1b4f5dc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManager.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManager.java
@@ -31,8 +31,9 @@
import org.apache.asterix.common.api.IClusterEventsSubscriber;
import org.apache.asterix.common.api.IClusterManagementWork;
-import org.apache.asterix.common.config.ExternalProperties;
import org.apache.asterix.common.config.ClusterProperties;
+import org.apache.asterix.common.config.ExternalProperties;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.event.management.AsterixEventServiceClient;
import org.apache.asterix.event.model.AsterixInstance;
@@ -47,7 +48,6 @@
import org.apache.asterix.event.util.PatternCreator;
import org.apache.asterix.installer.schema.conf.Configuration;
import org.apache.asterix.metadata.api.IClusterManager;
-import org.apache.asterix.runtime.utils.AppContextInfo;
public class ClusterManager implements IClusterManager {
@@ -91,17 +91,17 @@
}
@Override
- public void addNode(Node node) throws AsterixException {
+ public void addNode(ICcApplicationContext appCtx, Node node) throws AsterixException {
try {
Cluster cluster = ClusterProperties.INSTANCE.getCluster();
- List<Pattern> pattern = new ArrayList<Pattern>();
- String asterixInstanceName = AppContextInfo.INSTANCE.getMetadataProperties().getInstanceName();
+ List<Pattern> pattern = new ArrayList<>();
+ String asterixInstanceName = appCtx.getMetadataProperties().getInstanceName();
Patterns prepareNode = PatternCreator.INSTANCE.createPrepareNodePattern(asterixInstanceName,
ClusterProperties.INSTANCE.getCluster(), node);
cluster.getNode().add(node);
client.submit(prepareNode);
- ExternalProperties externalProps = AppContextInfo.INSTANCE.getExternalProperties();
+ ExternalProperties externalProps = appCtx.getExternalProperties();
AsterixEventServiceUtil.poulateClusterEnvironmentProperties(cluster, externalProps.getCCJavaParams(),
externalProps.getNCJavaParams());
@@ -110,8 +110,8 @@
String hostId = node.getId();
String nodeControllerId = asterixInstanceName + "_" + node.getId();
String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
- Pattern startNC = PatternCreator.INSTANCE.createNCStartPattern(ccHost, hostId, nodeControllerId, iodevices,
- false);
+ Pattern startNC =
+ PatternCreator.INSTANCE.createNCStartPattern(ccHost, hostId, nodeControllerId, iodevices, false);
pattern.add(startNC);
Patterns startNCPattern = new Patterns(pattern);
client.submit(startNCPattern);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagerProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagerProvider.java
index cbb3229..d11e70d 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagerProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagerProvider.java
@@ -23,6 +23,7 @@
import org.apache.asterix.common.api.IClusterEventsSubscriber;
import org.apache.asterix.common.config.ClusterProperties;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.event.schema.cluster.Cluster;
import org.apache.asterix.event.schema.cluster.Node;
@@ -57,7 +58,7 @@
}
private static class NoopClusterManager implements IClusterManager {
@Override
- public void addNode(Node node) throws AsterixException {
+ public void addNode(ICcApplicationContext appCtx, Node node) throws AsterixException {
// no-op
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/DatasetHints.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/DatasetHints.java
index 90c8b1e..277587f 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/DatasetHints.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/DatasetHints.java
@@ -21,7 +21,7 @@
import java.util.HashSet;
import java.util.Set;
-import org.apache.asterix.runtime.utils.AppContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.algebricks.common.utils.Pair;
/**
@@ -42,19 +42,19 @@
* first element as a boolean that represents the validation result.
* second element as the error message if the validation result is false
*/
- public static Pair<Boolean, String> validate(String hintName, String value) {
+ public static Pair<Boolean, String> validate(ICcApplicationContext appCtx, String hintName, String value) {
for (IHint h : hints) {
if (h.getName().equalsIgnoreCase(hintName.trim())) {
- return h.validateValue(value);
+ return h.validateValue(appCtx, value);
}
}
- return new Pair<Boolean, String>(false, "Unknown hint :" + hintName);
+ return new Pair<>(false, "Unknown hint :" + hintName);
}
private static Set<IHint> hints = initHints();
private static Set<IHint> initHints() {
- Set<IHint> hints = new HashSet<IHint>();
+ Set<IHint> hints = new HashSet<>();
hints.add(new DatasetCardinalityHint());
hints.add(new DatasetNodegroupCardinalityHint());
return hints;
@@ -74,19 +74,19 @@
}
@Override
- public Pair<Boolean, String> validateValue(String value) {
+ public Pair<Boolean, String> validateValue(ICcApplicationContext appCtx, String value) {
boolean valid = true;
long longValue;
try {
longValue = Long.parseLong(value);
if (longValue < 0) {
- return new Pair<Boolean, String>(false, "Value must be >= 0");
+ return new Pair<>(false, "Value must be >= 0");
}
} catch (NumberFormatException nfe) {
valid = false;
- return new Pair<Boolean, String>(valid, "Inappropriate value");
+ return new Pair<>(valid, "Inappropriate value");
}
- return new Pair<Boolean, String>(true, null);
+ return new Pair<>(true, null);
}
}
@@ -105,26 +105,25 @@
}
@Override
- public Pair<Boolean, String> validateValue(String value) {
+ public Pair<Boolean, String> validateValue(ICcApplicationContext appCtx, String value) {
boolean valid = true;
int intValue;
try {
intValue = Integer.parseInt(value);
if (intValue < 0) {
- return new Pair<Boolean, String>(false, "Value must be >= 0");
+ return new Pair<>(false, "Value must be >= 0");
}
- int numNodesInCluster = AppContextInfo.INSTANCE.getMetadataProperties().getNodeNames()
- .size();
+ int numNodesInCluster = appCtx.getMetadataProperties().getNodeNames().size();
if (numNodesInCluster < intValue) {
- return new Pair<Boolean, String>(false,
+ return new Pair<>(false,
"Value must be greater or equal to the existing number of nodes in cluster ("
+ numNodesInCluster + ")");
}
} catch (NumberFormatException nfe) {
valid = false;
- return new Pair<Boolean, String>(valid, "Inappropriate value");
+ return new Pair<>(valid, "Inappropriate value");
}
- return new Pair<Boolean, String>(true, null);
+ return new Pair<>(true, null);
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/IHint.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/IHint.java
index fefc066..97422f5 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/IHint.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/dataset/hints/IHint.java
@@ -18,6 +18,7 @@
*/
package org.apache.asterix.metadata.dataset.hints;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.algebricks.common.utils.Pair;
/**
@@ -41,6 +42,6 @@
* first element as a boolean that represents the validation result.
* second element as the error message if the validation result is false
*/
- public Pair<Boolean, String> validateValue(String value);
+ public Pair<Boolean, String> validateValue(ICcApplicationContext appCtx, String value);
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
index aa76122..d111eb2 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
@@ -31,7 +31,7 @@
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.config.StorageProperties;
import org.apache.asterix.common.context.IStorageComponentProvider;
-import org.apache.asterix.common.dataflow.IApplicationContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.dataflow.LSMInvertedIndexInsertDeleteOperatorDescriptor;
import org.apache.asterix.common.dataflow.LSMTreeInsertDeleteOperatorDescriptor;
import org.apache.asterix.common.exceptions.AsterixException;
@@ -86,7 +86,6 @@
import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
import org.apache.asterix.runtime.operators.LSMInvertedIndexUpsertOperatorDescriptor;
import org.apache.asterix.runtime.operators.LSMTreeUpsertOperatorDescriptor;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.asterix.runtime.utils.RuntimeComponentsProvider;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
@@ -146,6 +145,7 @@
public class MetadataProvider implements IMetadataProvider<DataSourceId, String> {
+ private final ICcApplicationContext appCtx;
private final IStorageComponentProvider storaegComponentProvider;
private final IMetadataPageManagerFactory metadataPageManagerFactory;
private final IPrimitiveValueProviderFactory primitiveValueProviderFactory;
@@ -167,11 +167,13 @@
private boolean isTemporaryDatasetWriteJob = true;
private boolean blockingOperatorDisabled = false;
- public MetadataProvider(Dataverse defaultDataverse, IStorageComponentProvider componentProvider) {
+ public MetadataProvider(ICcApplicationContext appCtx, Dataverse defaultDataverse,
+ IStorageComponentProvider componentProvider) {
+ this.appCtx = appCtx;
this.defaultDataverse = defaultDataverse;
this.storaegComponentProvider = componentProvider;
- storageProperties = AppContextInfo.INSTANCE.getStorageProperties();
- libraryManager = AppContextInfo.INSTANCE.getLibraryManager();
+ storageProperties = appCtx.getStorageProperties();
+ libraryManager = appCtx.getLibraryManager();
metadataPageManagerFactory = componentProvider.getMetadataPageManagerFactory();
primitiveValueProviderFactory = componentProvider.getPrimitiveValueProviderFactory();
locks = new LockList();
@@ -417,8 +419,8 @@
public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> buildFeedIntakeRuntime(
JobSpecification jobSpec, Feed primaryFeed, FeedPolicyAccessor policyAccessor) throws Exception {
Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> factoryOutput;
- factoryOutput =
- FeedMetadataUtil.getPrimaryFeedFactoryAndOutput(primaryFeed, policyAccessor, mdTxnCtx, libraryManager);
+ factoryOutput = FeedMetadataUtil.getPrimaryFeedFactoryAndOutput(primaryFeed, policyAccessor, mdTxnCtx,
+ getApplicationContext());
ARecordType recordType = FeedMetadataUtil.getOutputType(primaryFeed, primaryFeed.getAdapterConfiguration(),
ExternalDataConstants.KEY_TYPE_NAME);
IAdapterFactory adapterFactory = factoryOutput.first;
@@ -510,7 +512,7 @@
context.getBinaryComparatorFactoryProvider());
}
- IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+ ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc;
spPc = getSplitProviderAndConstraints(dataset, theIndex.getIndexName());
int[] primaryKeyFields = new int[numPrimaryKeys];
@@ -593,7 +595,7 @@
outputVars, keysStartIndex, numNestedSecondaryKeyFields, typeEnv, context);
ITypeTraits[] typeTraits = JobGenHelper.variablesToTypeTraits(outputVars, keysStartIndex,
numNestedSecondaryKeyFields + numPrimaryKeys, typeEnv, context);
- IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+ ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc =
getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
ARecordType metaType = null;
@@ -732,7 +734,7 @@
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
getSplitProviderAndConstraints(dataset);
- IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+ ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
long numElementsHint = getCardinalityPerPartitionHint(dataset);
// TODO
@@ -883,8 +885,8 @@
Map<String, String> configuration, ARecordType itemType, ARecordType metaType) throws AlgebricksException {
try {
configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataset.getDataverseName());
- IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(libraryManager, adapterName,
- configuration, itemType, metaType);
+ IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(
+ getApplicationContext().getServiceContext(), adapterName, configuration, itemType, metaType);
// check to see if dataset is indexed
Index filesIndex =
@@ -969,9 +971,9 @@
.getDatatype();
}
ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
- LookupAdapterFactory<?> adapterFactory =
- AdapterFactoryProvider.getLookupAdapterFactory(libraryManager, datasetDetails.getProperties(),
- itemType, ridIndexes, retainInput, retainMissing, context.getMissingWriterFactory());
+ LookupAdapterFactory<?> adapterFactory = AdapterFactoryProvider.getLookupAdapterFactory(
+ getApplicationContext().getServiceContext(), datasetDetails.getProperties(), itemType, ridIndexes,
+ retainInput, retainMissing, context.getMissingWriterFactory());
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo;
try {
@@ -988,7 +990,7 @@
fileIndex, itemType, metaType, compactionInfo.first, compactionInfo.second);
// Create the out record descriptor, appContext and fileSplitProvider for the files index
RecordDescriptor outRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
- IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+ ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc;
spPc = metadataProvider.splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
dataset.getDatasetName(), fileIndexName, false);
@@ -1059,7 +1061,7 @@
.getDatatype(mdTxnCtx, itemTypeDataverseName, itemTypeName).getDatatype();
ARecordType metaItemType = DatasetUtil.getMetaType(this, dataset);
ITypeTraits[] typeTraits = DatasetUtil.computeTupleTypeTraits(dataset, itemType, metaItemType);
- IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+ ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
IBinaryComparatorFactory[] comparatorFactories = DatasetUtil.computeKeysBinaryComparatorFactories(dataset,
itemType, metaItemType, context.getBinaryComparatorFactoryProvider());
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
@@ -1251,7 +1253,7 @@
ARecordType metaItemType = DatasetUtil.getMetaType(this, dataset);
ITypeTraits[] typeTraits = DatasetUtil.computeTupleTypeTraits(dataset, itemType, metaItemType);
- IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+ ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
IBinaryComparatorFactory[] comparatorFactories = DatasetUtil.computeKeysBinaryComparatorFactories(dataset,
itemType, metaItemType, context.getBinaryComparatorFactoryProvider());
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
@@ -1450,7 +1452,7 @@
++i;
}
- IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+ ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
@@ -1587,7 +1589,7 @@
ARecordType metaItemType = DatasetUtil.getMetaType(this, dataset);
IBinaryComparatorFactory[] primaryComparatorFactories = DatasetUtil.computeKeysBinaryComparatorFactories(
dataset, recType, metaItemType, context.getBinaryComparatorFactoryProvider());
- IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+ ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
int[] btreeFields = new int[primaryComparatorFactories.length];
@@ -1808,7 +1810,7 @@
}
}
- IApplicationContextInfo appContext = (IApplicationContextInfo) context.getAppContext();
+ ICcApplicationContext appContext = (ICcApplicationContext) context.getAppContext();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
@@ -2074,4 +2076,8 @@
public LockList getLocks() {
return locks;
}
+
+ public ICcApplicationContext getApplicationContext() {
+ return appCtx;
+ }
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
index 572130d..8fbefb9 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
@@ -26,7 +26,7 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.active.ActiveJobNotificationHandler;
+import org.apache.asterix.active.ActiveLifecycleListener;
import org.apache.asterix.active.IActiveEntityEventsListener;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.context.IStorageComponentProvider;
@@ -275,13 +275,16 @@
* @throws Exception
* if an error occur during the drop process or if the dataset can't be dropped for any reason
*/
- public void drop(MetadataProvider metadataProvider, MutableObject<MetadataTransactionContext> mdTxnCtx,
- List<JobSpecification> jobsToExecute, MutableBoolean bActiveTxn, MutableObject<ProgressState> progress,
- IHyracksClientConnection hcc) throws Exception {
+ public void drop(MetadataProvider metadataProvider,
+ MutableObject<MetadataTransactionContext> mdTxnCtx, List<JobSpecification> jobsToExecute,
+ MutableBoolean bActiveTxn, MutableObject<ProgressState> progress, IHyracksClientConnection hcc)
+ throws Exception {
Map<FeedConnectionId, Pair<JobSpecification, Boolean>> disconnectJobList = new HashMap<>();
if (getDatasetType() == DatasetType.INTERNAL) {
// prepare job spec(s) that would disconnect any active feeds involving the dataset.
- IActiveEntityEventsListener[] activeListeners = ActiveJobNotificationHandler.INSTANCE.getEventListeners();
+ ActiveLifecycleListener activeListener =
+ (ActiveLifecycleListener) metadataProvider.getApplicationContext().getActiveLifecycleListener();
+ IActiveEntityEventsListener[] activeListeners = activeListener.getNotificationHandler().getEventListeners();
for (IActiveEntityEventsListener listener : activeListeners) {
if (listener.isEntityUsingDataset(this)) {
throw new CompilationException(ErrorCode.COMPILATION_CANT_DROP_ACTIVE_DATASET,
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
index b54c9e6..18e4676 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
@@ -22,11 +22,11 @@
import java.util.Map;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
-import org.apache.asterix.common.library.ILibraryManager;
import org.apache.asterix.external.api.IAdapterFactory;
import org.apache.asterix.external.api.IDataSourceAdapter;
import org.apache.asterix.external.api.IDataSourceAdapter.AdapterType;
@@ -95,7 +95,7 @@
return feedPolicy;
}
- public static void validateFeed(Feed feed, MetadataTransactionContext mdTxnCtx, ILibraryManager libraryManager)
+ public static void validateFeed(Feed feed, MetadataTransactionContext mdTxnCtx, ICcApplicationContext appCtx)
throws AsterixException {
try {
String adapterName = feed.getAdapterName();
@@ -122,7 +122,8 @@
case EXTERNAL:
String[] anameComponents = adapterName.split("#");
String libraryName = anameComponents[0];
- ClassLoader cl = libraryManager.getLibraryClassLoader(feed.getDataverseName(), libraryName);
+ ClassLoader cl =
+ appCtx.getLibraryManager().getLibraryClassLoader(feed.getDataverseName(), libraryName);
adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
break;
default:
@@ -130,10 +131,10 @@
}
adapterFactory.setOutputType(adapterOutputType);
adapterFactory.setMetaType(metaType);
- adapterFactory.configure(null, configuration);
+ adapterFactory.configure(appCtx.getServiceContext(), configuration);
} else {
- AdapterFactoryProvider.getAdapterFactory(libraryManager, adapterName, configuration, adapterOutputType,
- metaType);
+ AdapterFactoryProvider.getAdapterFactory(appCtx.getServiceContext(), adapterName, configuration,
+ adapterOutputType, metaType);
}
if (metaType == null && configuration.containsKey(ExternalDataConstants.KEY_META_TYPE_NAME)) {
metaType = getOutputType(feed, configuration, ExternalDataConstants.KEY_META_TYPE_NAME);
@@ -159,7 +160,7 @@
@SuppressWarnings("rawtypes")
public static Triple<IAdapterFactory, RecordDescriptor, AdapterType> getPrimaryFeedFactoryAndOutput(Feed feed,
- FeedPolicyAccessor policyAccessor, MetadataTransactionContext mdTxnCtx, ILibraryManager libraryManager)
+ FeedPolicyAccessor policyAccessor, MetadataTransactionContext mdTxnCtx, ICcApplicationContext appCtx)
throws AlgebricksException {
// This method needs to be re-visited
String adapterName = null;
@@ -194,7 +195,8 @@
case EXTERNAL:
String[] anameComponents = adapterName.split("#");
String libraryName = anameComponents[0];
- ClassLoader cl = libraryManager.getLibraryClassLoader(feed.getDataverseName(), libraryName);
+ ClassLoader cl =
+ appCtx.getLibraryManager().getLibraryClassLoader(feed.getDataverseName(), libraryName);
adapterFactory = (IAdapterFactory) cl.loadClass(adapterFactoryClassname).newInstance();
break;
default:
@@ -202,10 +204,10 @@
}
adapterFactory.setOutputType(adapterOutputType);
adapterFactory.setMetaType(metaType);
- adapterFactory.configure(null, configuration);
+ adapterFactory.configure(appCtx.getServiceContext(), configuration);
} else {
- adapterFactory = AdapterFactoryProvider.getAdapterFactory(libraryManager, adapterName, configuration,
- adapterOutputType, metaType);
+ adapterFactory = AdapterFactoryProvider.getAdapterFactory(appCtx.getServiceContext(), adapterName,
+ configuration, adapterOutputType, metaType);
adapterType = IDataSourceAdapter.AdapterType.INTERNAL;
}
if (metaType == null) {
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index ca56cc3..6cb4a4f 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -320,18 +320,18 @@
return null;
}
- public static JobSpecification createDropDatasetJobSpec(Dataset dataset, Index primaryIndex,
- MetadataProvider metadataProvider)
+ public static JobSpecification createDropDatasetJobSpec(Dataset dataset,
+ Index primaryIndex, MetadataProvider metadataProvider)
throws AlgebricksException, HyracksDataException, RemoteException, ACIDException {
String datasetPath = dataset.getDataverseName() + File.separator + dataset.getDatasetName();
LOGGER.info("DROP DATASETPATH: " + datasetPath);
if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
- return RuntimeUtils.createJobSpecification();
+ return RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
}
ARecordType itemType =
(ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
ARecordType metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
- JobSpecification specPrimary = RuntimeUtils.createJobSpecification();
+ JobSpecification specPrimary = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
metadataProvider.getSplitProviderAndConstraints(dataset);
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
@@ -352,7 +352,7 @@
public static JobSpecification buildDropFilesIndexJobSpec(MetadataProvider metadataProvider, Dataset dataset)
throws AlgebricksException {
String indexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName());
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
metadataProvider.splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
@@ -380,12 +380,12 @@
String datasetPath = dataset.getDataverseName() + File.separator + dataset.getDatasetName();
LOGGER.info("DROP DATASETPATH: " + datasetPath);
if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
- return RuntimeUtils.createJobSpecification();
+ return RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
}
ARecordType itemType =
(ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
ARecordType metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
- JobSpecification specPrimary = RuntimeUtils.createJobSpecification();
+ JobSpecification specPrimary = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
metadataProvider.getSplitProviderAndConstraints(dataset);
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
@@ -406,8 +406,8 @@
return specPrimary;
}
- public static JobSpecification createDatasetJobSpec(Dataverse dataverse, String datasetName,
- MetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
+ public static JobSpecification createDatasetJobSpec(Dataverse dataverse,
+ String datasetName, MetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
String dataverseName = dataverse.getDataverseName();
IDataFormat format;
@@ -430,7 +430,7 @@
metaItemType = (ARecordType) metadataProvider.findType(dataset.getMetaItemTypeDataverseName(),
dataset.getMetaItemTypeName());
}
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
IBinaryComparatorFactory[] comparatorFactories = DatasetUtil.computeKeysBinaryComparatorFactories(dataset,
itemType, metaItemType, format.getBinaryComparatorFactoryProvider());
ITypeTraits[] typeTraits = DatasetUtil.computeTupleTypeTraits(dataset, itemType, metaItemType);
@@ -474,8 +474,8 @@
return spec;
}
- public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
- MetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
+ public static JobSpecification compactDatasetJobSpec(Dataverse dataverse,
+ String datasetName, MetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
String dataverseName = dataverse.getDataverseName();
IDataFormat format;
try {
@@ -490,7 +490,7 @@
ARecordType itemType =
(ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
ARecordType metaItemType = DatasetUtil.getMetaType(metadataProvider, dataset);
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
IBinaryComparatorFactory[] comparatorFactories = DatasetUtil.computeKeysBinaryComparatorFactories(dataset,
itemType, metaItemType, format.getBinaryComparatorFactoryProvider());
ITypeTraits[] typeTraits = DatasetUtil.computeTupleTypeTraits(dataset, itemType, metaItemType);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
index edaa73e..ed6de39 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
@@ -191,7 +191,7 @@
List<ExternalFile> externalFilesSnapshot, MetadataProvider metadataProvider, boolean createIndex)
throws AlgebricksException {
IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
@@ -205,10 +205,9 @@
Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
dataset.getDataverseName(), dataset.getDatasetName(), fileIndexName);
IResourceFactory localResourceMetadata = new ExternalBTreeLocalResourceMetadataFactory(
- filesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS,
- FilesIndexDescription.FILES_INDEX_COMP_FACTORIES, new int[] { 0 }, false, dataset.getDatasetId(),
- mergePolicyFactory, mergePolicyFactoryProperties, dataset.getIndexOperationTrackerFactory(fileIndex),
- dataset.getIoOperationCallbackFactory(fileIndex),
+ filesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS, FilesIndexDescription.FILES_INDEX_COMP_FACTORIES,
+ new int[] { 0 }, false, dataset.getDatasetId(), mergePolicyFactory, mergePolicyFactoryProperties,
+ dataset.getIndexOperationTrackerFactory(fileIndex), dataset.getIoOperationCallbackFactory(fileIndex),
storageComponentProvider.getMetadataPageManagerFactory());
PersistentLocalResourceFactoryProvider localResourceFactoryProvider =
new PersistentLocalResourceFactoryProvider(localResourceMetadata, LocalResource.ExternalBTreeResource);
@@ -244,9 +243,9 @@
List<ExternalFile> files, RecordDescriptor indexerDesc) throws HyracksDataException, AlgebricksException {
ExternalDatasetDetails externalDatasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
Map<String, String> configuration = externalDatasetDetails.getProperties();
- IAdapterFactory adapterFactory =
- AdapterFactoryProvider.getIndexingAdapterFactory(metadataProvider.getLibraryManager(),
- externalDatasetDetails.getAdapter(), configuration, (ARecordType) itemType, files, true, null);
+ IAdapterFactory adapterFactory = AdapterFactoryProvider.getIndexingAdapterFactory(
+ metadataProvider.getApplicationContext().getServiceContext(), externalDatasetDetails.getAdapter(),
+ configuration, (ARecordType) itemType, files, true, null);
return new Pair<>(new ExternalScanOperatorDescriptor(jobSpec, indexerDesc, adapterFactory),
adapterFactory.getPartitionConstraint());
}
@@ -311,8 +310,8 @@
} else {
// Same file name, Different file mod date -> delete and add
metadataFile.setPendingOp(ExternalFilePendingOp.DROP_OP);
- deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(),
- 0, metadataFile.getFileName(), metadataFile.getLastModefiedTime(), metadataFile.getSize(),
+ deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(), 0,
+ metadataFile.getFileName(), metadataFile.getLastModefiedTime(), metadataFile.getSize(),
ExternalFilePendingOp.DROP_OP));
fileSystemFile.setPendingOp(ExternalFilePendingOp.ADD_OP);
fileSystemFile.setFileNumber(newFileNumber);
@@ -375,7 +374,7 @@
public static JobSpecification buildDropFilesIndexJobSpec(MetadataProvider metadataProvider, Dataset dataset)
throws AlgebricksException {
String indexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName());
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
metadataProvider.splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(),
dataset.getDatasetName(), indexName, true);
@@ -408,9 +407,9 @@
} else if (file.getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
for (ExternalFile appendedFile : appendedFiles) {
if (appendedFile.getFileName().equals(file.getFileName())) {
- files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(),
- file.getFileNumber(), file.getFileName(), file.getLastModefiedTime(),
- appendedFile.getSize(), ExternalFilePendingOp.NO_OP));
+ files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(), file.getFileNumber(),
+ file.getFileName(), file.getLastModefiedTime(), appendedFile.getSize(),
+ ExternalFilePendingOp.NO_OP));
}
}
}
@@ -449,20 +448,19 @@
public static JobSpecification buildCommitJob(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider)
throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
DatasetUtil.getMergePolicyFactory(ds, metadataProvider.getMetadataTxnContext());
ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint =
- metadataProvider.getSplitProviderAndConstraints(ds,
- IndexingConstants.getFilesIndexName(ds.getDatasetName()));
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+ .getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName()));
IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
String fileIndexName = BTreeDataflowHelperFactoryProvider.externalFileIndexName(ds);
Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
ds.getDataverseName(), ds.getDatasetName(), fileIndexName);
- IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(
- metadataProvider, fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
+ IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(metadataProvider,
+ fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
RuntimeComponentsProvider.RUNTIME_PROVIDER, RuntimeComponentsProvider.RUNTIME_PROVIDER);
@@ -492,20 +490,19 @@
public static JobSpecification buildAbortOp(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider)
throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
DatasetUtil.getMergePolicyFactory(ds, metadataProvider.getMetadataTxnContext());
ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint =
- metadataProvider.getSplitProviderAndConstraints(ds,
- IndexingConstants.getFilesIndexName(ds.getDatasetName()));
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+ .getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName()));
IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
String fileIndexName = BTreeDataflowHelperFactoryProvider.externalFileIndexName(ds);
Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
ds.getDataverseName(), ds.getDatasetName(), fileIndexName);
- IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(
- metadataProvider, fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
+ IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(metadataProvider,
+ fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
RuntimeComponentsProvider.RUNTIME_PROVIDER, RuntimeComponentsProvider.RUNTIME_PROVIDER);
@@ -536,20 +533,19 @@
public static JobSpecification buildRecoverOp(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider)
throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
DatasetUtil.getMergePolicyFactory(ds, metadataProvider.getMetadataTxnContext());
ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint =
- metadataProvider.getSplitProviderAndConstraints(ds,
- IndexingConstants.getFilesIndexName(ds.getDatasetName()));
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+ .getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName()));
IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
String fileIndexName = BTreeDataflowHelperFactoryProvider.externalFileIndexName(ds);
Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
ds.getDataverseName(), ds.getDatasetName(), fileIndexName);
- IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(
- metadataProvider, fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
+ IIndexDataflowHelperFactory filesIndexDataflowHelperFactory = ds.getIndexDataflowHelperFactory(metadataProvider,
+ fileIndex, null, null, mergePolicyFactory, mergePolicyFactoryProperties);
IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
RuntimeComponentsProvider.RUNTIME_PROVIDER, RuntimeComponentsProvider.RUNTIME_PROVIDER);
@@ -579,7 +575,7 @@
public static JobSpecification compactFilesIndexJobSpec(Dataset dataset, MetadataProvider metadataProvider,
IStorageComponentProvider storageComponentProvider) throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java
index 83403d5..c6dc211 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/IndexUtil.java
@@ -18,7 +18,12 @@
*/
package org.apache.asterix.metadata.utils;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
import org.apache.asterix.common.config.OptimizationConfUtil;
+import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.external.indexing.ExternalFile;
@@ -26,14 +31,18 @@
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.runtime.utils.RuntimeUtils;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.rewriter.base.PhysicalOptimizationConfig;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.job.JobSpecification;
-
-import java.util.Collections;
-import java.util.List;
+import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
+import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
public class IndexUtil {
@@ -90,8 +99,7 @@
}
public static JobSpecification buildDropIndexJobSpec(Index index, MetadataProvider metadataProvider,
- Dataset dataset)
- throws AlgebricksException {
+ Dataset dataset) throws AlgebricksException {
ARecordType recordType =
(ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
ARecordType metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
@@ -109,8 +117,8 @@
return secondaryIndexHelper.buildDropJobSpec();
}
- public static JobSpecification buildSecondaryIndexCreationJobSpec(Dataset dataset, Index index,
- ARecordType recType, ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType,
+ public static JobSpecification buildSecondaryIndexCreationJobSpec(Dataset dataset, Index index, ARecordType recType,
+ ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType,
MetadataProvider metadataProvider) throws AlgebricksException {
SecondaryIndexOperationsHelper secondaryIndexHelper =
SecondaryIndexOperationsHelper.createIndexOperationsHelper(dataset, index, metadataProvider,
@@ -137,6 +145,31 @@
return secondaryIndexHelper.buildLoadingJobSpec();
}
+ public static JobSpecification buildDropSecondaryIndexJobSpec(Index index, MetadataProvider metadataProvider,
+ Dataset dataset) throws AlgebricksException {
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
+ IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
+ metadataProvider.getSplitProviderAndConstraints(dataset, index.getIndexName());
+ Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
+ DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
+ ARecordType recordType =
+ (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
+ ARecordType metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
+ IIndexDataflowHelperFactory dataflowHelperFactory = dataset.getIndexDataflowHelperFactory(metadataProvider,
+ index, recordType, metaType, compactionInfo.first, compactionInfo.second);
+ // The index drop operation should be persistent regardless of temp datasets or permanent dataset.
+ IndexDropOperatorDescriptor btreeDrop =
+ new IndexDropOperatorDescriptor(spec, storageComponentProvider.getStorageManager(),
+ storageComponentProvider.getIndexLifecycleManagerProvider(), splitsAndConstraint.first,
+ dataflowHelperFactory, storageComponentProvider.getMetadataPageManagerFactory());
+ AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeDrop,
+ splitsAndConstraint.second);
+ spec.addRoot(btreeDrop);
+
+ return spec;
+ }
+
public static JobSpecification buildSecondaryIndexCompactJobSpec(Dataset dataset, Index index, ARecordType recType,
ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType,
MetadataProvider metadataProvider) throws AlgebricksException {
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryBTreeOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryBTreeOperationsHelper.java
index 59068c8..faea7fd 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryBTreeOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryBTreeOperationsHelper.java
@@ -22,7 +22,6 @@
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.GlobalConfig;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.external.indexing.IndexingConstants;
import org.apache.asterix.external.operators.ExternalScanOperatorDescriptor;
@@ -62,25 +61,23 @@
import org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
import org.apache.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
-import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
import org.apache.hyracks.storage.common.file.ILocalResourceFactoryProvider;
import org.apache.hyracks.storage.common.file.LocalResource;
public class SecondaryBTreeOperationsHelper extends SecondaryTreeIndexOperationsHelper {
protected SecondaryBTreeOperationsHelper(Dataset dataset, Index index, PhysicalOptimizationConfig physOptConf,
- IPropertiesProvider propertiesProvider, MetadataProvider metadataProvider, ARecordType recType,
- ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType) {
- super(dataset, index, physOptConf, propertiesProvider, metadataProvider, recType, metaType, enforcedType,
- enforcedMetaType);
+ MetadataProvider metadataProvider, ARecordType recType, ARecordType metaType, ARecordType enforcedType,
+ ARecordType enforcedMetaType) {
+ super(dataset, index, physOptConf, metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
}
@Override
public JobSpecification buildCreationJobSpec() throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
ILocalResourceFactoryProvider localResourceFactoryProvider;
- IIndexDataflowHelperFactory indexDataflowHelperFactory = dataset.getIndexDataflowHelperFactory(
- metadataProvider, index, itemType, metaType, mergePolicyFactory, mergePolicyFactoryProperties);
+ IIndexDataflowHelperFactory indexDataflowHelperFactory = dataset.getIndexDataflowHelperFactory(metadataProvider,
+ index, itemType, metaType, mergePolicyFactory, mergePolicyFactoryProperties);
IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
if (dataset.getDatasetType() == DatasetType.INTERNAL) {
//prepare a LocalResourceMetadata which will be stored in NC's local resource repository
@@ -109,9 +106,9 @@
new TreeIndexCreateOperatorDescriptor(spec, storageComponentProvider.getStorageManager(),
storageComponentProvider.getIndexLifecycleManagerProvider(), secondaryFileSplitProvider,
secondaryTypeTraits, secondaryComparatorFactories, secondaryBloomFilterKeyFields,
- indexDataflowHelperFactory, localResourceFactoryProvider,
- dataset.getModificationCallbackFactory(storageComponentProvider, index, null,
- IndexOperation.CREATE, null),
+ indexDataflowHelperFactory,
+ localResourceFactoryProvider, dataset.getModificationCallbackFactory(storageComponentProvider,
+ index, null, IndexOperation.CREATE, null),
storageComponentProvider.getMetadataPageManagerFactory());
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, secondaryIndexCreateOp,
secondaryPartitionConstraint);
@@ -122,7 +119,7 @@
@Override
public JobSpecification buildLoadingJobSpec() throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
boolean isEnforcingKeyTypes = index.isEnforcingKeyFileds();
int[] fieldPermutation = createFieldPermutationForBulkLoadOp(index.getKeyFieldNames().size());
IIndexDataflowHelperFactory dataflowHelperFactory = dataset.getIndexDataflowHelperFactory(metadataProvider,
@@ -269,8 +266,7 @@
sourceColumn = recordColumn + 1;
}
secondaryFieldAccessEvalFactories[i] = metadataProvider.getFormat().getFieldAccessEvaluatorFactory(
- isEnforcingKeyTypes ? enforcedItemType : sourceType, index.getKeyFieldNames().get(i),
- sourceColumn);
+ isEnforcingKeyTypes ? enforcedItemType : sourceType, index.getKeyFieldNames().get(i), sourceColumn);
Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(i),
index.getKeyFieldNames().get(i), sourceType);
IAType keyType = keyTypePair.first;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
index efb3315..e5b5b9f 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
@@ -25,7 +25,6 @@
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.context.ITransactionSubsystemProvider;
import org.apache.asterix.common.context.TransactionSubsystemProvider;
import org.apache.asterix.common.exceptions.AsterixException;
@@ -53,7 +52,6 @@
import org.apache.asterix.runtime.evaluators.functions.IsUnknownDescriptor;
import org.apache.asterix.runtime.evaluators.functions.NotDescriptor;
import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.runtime.utils.RuntimeComponentsProvider;
import org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexInstantSearchOperationCallbackFactory;
import org.apache.asterix.transaction.management.service.transaction.JobIdFactory;
@@ -118,7 +116,6 @@
protected int[] secondaryBloomFilterKeyFields;
protected RecordDescriptor secondaryRecDesc;
protected IScalarEvaluatorFactory[] secondaryFieldAccessEvalFactories;
- protected IPropertiesProvider propertiesProvider;
protected ILSMMergePolicyFactory mergePolicyFactory;
protected Map<String, String> mergePolicyFactoryProperties;
protected RecordDescriptor enforcedRecDesc;
@@ -135,12 +132,11 @@
// Prevent public construction. Should be created via createIndexCreator().
protected SecondaryIndexOperationsHelper(Dataset dataset, Index index, PhysicalOptimizationConfig physOptConf,
- IPropertiesProvider propertiesProvider, MetadataProvider metadataProvider, ARecordType recType,
+ MetadataProvider metadataProvider, ARecordType recType,
ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType) {
this.dataset = dataset;
this.index = index;
this.physOptConf = physOptConf;
- this.propertiesProvider = propertiesProvider;
this.metadataProvider = metadataProvider;
this.itemType = recType;
this.metaType = metaType;
@@ -151,17 +147,16 @@
public static SecondaryIndexOperationsHelper createIndexOperationsHelper(Dataset dataset, Index index,
MetadataProvider metadataProvider, PhysicalOptimizationConfig physOptConf, ARecordType recType,
ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType) throws AlgebricksException {
- IPropertiesProvider asterixPropertiesProvider = AppContextInfo.INSTANCE;
SecondaryIndexOperationsHelper indexOperationsHelper;
switch (index.getIndexType()) {
case BTREE:
indexOperationsHelper =
- new SecondaryBTreeOperationsHelper(dataset, index, physOptConf, asterixPropertiesProvider,
+ new SecondaryBTreeOperationsHelper(dataset, index, physOptConf,
metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
break;
case RTREE:
indexOperationsHelper =
- new SecondaryRTreeOperationsHelper(dataset, index, physOptConf, asterixPropertiesProvider,
+ new SecondaryRTreeOperationsHelper(dataset, index, physOptConf,
metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
break;
case SINGLE_PARTITION_WORD_INVIX:
@@ -169,8 +164,7 @@
case LENGTH_PARTITIONED_WORD_INVIX:
case LENGTH_PARTITIONED_NGRAM_INVIX:
indexOperationsHelper = new SecondaryInvertedIndexOperationsHelper(dataset, index, physOptConf,
- asterixPropertiesProvider, metadataProvider, recType, metaType, enforcedType,
- enforcedMetaType);
+ metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
break;
default:
throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_INDEX_TYPE, index.getIndexType());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
index 5ab36c1..985f8cc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
@@ -18,8 +18,9 @@
*/
package org.apache.asterix.metadata.utils;
+import java.util.Map;
+
import org.apache.asterix.common.config.DatasetConfig.IndexType;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.CompilationException;
@@ -73,8 +74,6 @@
import org.apache.hyracks.storage.common.file.ILocalResourceFactoryProvider;
import org.apache.hyracks.storage.common.file.LocalResource;
-import java.util.Map;
-
public class SecondaryInvertedIndexOperationsHelper extends SecondaryIndexOperationsHelper {
private IAType secondaryKeyType;
@@ -92,11 +91,9 @@
private int[] secondaryFilterFieldsForNonBulkLoadOps;
protected SecondaryInvertedIndexOperationsHelper(Dataset dataset, Index index,
- PhysicalOptimizationConfig physOptConf, IPropertiesProvider propertiesProvider,
- MetadataProvider metadataProvider, ARecordType recType, ARecordType metaType, ARecordType enforcedType,
- ARecordType enforcedMetaType) {
- super(dataset, index, physOptConf, propertiesProvider, metadataProvider, recType, metaType, enforcedType,
- enforcedMetaType);
+ PhysicalOptimizationConfig physOptConf, MetadataProvider metadataProvider, ARecordType recType,
+ ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType) {
+ super(dataset, index, physOptConf, metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
}
@Override
@@ -107,9 +104,8 @@
boolean isEnforcingKeyTypes = index.isEnforcingKeyFileds();
// Sanity checks.
if (numPrimaryKeys > 1) {
- throw new CompilationException(
- ErrorCode.COMPILATION_ILLEGAL_INDEX_FOR_DATASET_WITH_COMPOSITE_PRIMARY_INDEX, indexType,
- RecordUtil.toFullyQualifiedName(dataset.getDataverseName(), dataset.getDatasetName()));
+ throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_FOR_DATASET_WITH_COMPOSITE_PRIMARY_INDEX,
+ indexType, RecordUtil.toFullyQualifiedName(dataset.getDataverseName(), dataset.getDatasetName()));
}
if (numSecondaryKeys > 1) {
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_NUM_OF_FIELD, numSecondaryKeys,
@@ -126,16 +122,14 @@
secondaryFieldAccessEvalFactories = new IScalarEvaluatorFactory[numSecondaryKeys + numFilterFields];
ISerializerDeserializer[] secondaryRecFields =
new ISerializerDeserializer[numPrimaryKeys + numSecondaryKeys + numFilterFields];
- ISerializerDeserializer[] enforcedRecFields =
- new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
+ ISerializerDeserializer[] enforcedRecFields = new ISerializerDeserializer[1 + numPrimaryKeys + numFilterFields];
secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
ITypeTraits[] enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys];
ISerializerDeserializerProvider serdeProvider = FormatUtils.getDefaultFormat().getSerdeProvider();
ITypeTraitProvider typeTraitProvider = FormatUtils.getDefaultFormat().getTypeTraitProvider();
if (numSecondaryKeys > 0) {
secondaryFieldAccessEvalFactories[0] = FormatUtils.getDefaultFormat().getFieldAccessEvaluatorFactory(
- isEnforcingKeyTypes ? enforcedItemType : itemType, index.getKeyFieldNames().get(0),
- numPrimaryKeys);
+ isEnforcingKeyTypes ? enforcedItemType : itemType, index.getKeyFieldNames().get(0), numPrimaryKeys);
Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0),
index.getKeyFieldNames().get(0), itemType);
secondaryKeyType = keyTypePair.first;
@@ -226,7 +220,7 @@
@Override
public JobSpecification buildCreationJobSpec() throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
//prepare a LocalResourceMetadata which will be stored in NC's local resource repository
IResourceFactory localResourceMetadata = new LSMInvertedIndexLocalResourceMetadataFactory(invListsTypeTraits,
@@ -234,8 +228,7 @@
dataset.getDatasetId(), mergePolicyFactory, mergePolicyFactoryProperties, filterTypeTraits,
filterCmpFactories, invertedIndexFields, secondaryFilterFields, secondaryFilterFieldsForNonBulkLoadOps,
invertedIndexFieldsForNonBulkLoadOps, dataset.getIndexOperationTrackerFactory(index),
- dataset.getIoOperationCallbackFactory(index),
- storageComponentProvider.getMetadataPageManagerFactory());
+ dataset.getIoOperationCallbackFactory(index), storageComponentProvider.getMetadataPageManagerFactory());
ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
localResourceMetadata, LocalResource.LSMInvertedIndexResource);
@@ -244,9 +237,9 @@
new LSMInvertedIndexCreateOperatorDescriptor(spec, storageComponentProvider.getStorageManager(),
secondaryFileSplitProvider, storageComponentProvider.getIndexLifecycleManagerProvider(),
tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits, primaryComparatorFactories,
- tokenizerFactory, dataflowHelperFactory, localResourceFactoryProvider,
- dataset.getModificationCallbackFactory(storageComponentProvider, index, null,
- IndexOperation.CREATE, null),
+ tokenizerFactory, dataflowHelperFactory,
+ localResourceFactoryProvider, dataset.getModificationCallbackFactory(storageComponentProvider,
+ index, null, IndexOperation.CREATE, null),
storageComponentProvider.getMetadataPageManagerFactory());
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, invIndexCreateOp,
secondaryPartitionConstraint);
@@ -257,7 +250,7 @@
@Override
public JobSpecification buildLoadingJobSpec() throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
// Create dummy key provider for feeding the primary index scan.
AbstractOperatorDescriptor keyProviderOp = createDummyKeyProviderOp(spec);
@@ -317,8 +310,8 @@
for (int i = 0; i < primaryKeyFields.length; i++) {
primaryKeyFields[i] = numSecondaryKeys + i;
}
- BinaryTokenizerOperatorDescriptor tokenizerOp = new BinaryTokenizerOperatorDescriptor(spec,
- tokenKeyPairRecDesc, tokenizerFactory, docField, primaryKeyFields, isPartitioned, false);
+ BinaryTokenizerOperatorDescriptor tokenizerOp = new BinaryTokenizerOperatorDescriptor(spec, tokenKeyPairRecDesc,
+ tokenizerFactory, docField, primaryKeyFields, isPartitioned, false);
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, tokenizerOp,
primaryPartitionConstraint);
return tokenizerOp;
@@ -364,7 +357,7 @@
@Override
public JobSpecification buildCompactJobSpec() throws AsterixException, AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
IIndexDataflowHelperFactory dataflowHelperFactory = createDataflowHelperFactory();
IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
LSMInvertedIndexCompactOperator compactOp =
@@ -385,7 +378,7 @@
@Override
public JobSpecification buildDropJobSpec() throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
metadataProvider.getSplitProviderAndConstraints(dataset, index.getIndexName());
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java
index 93a88e2..352bc6a 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryRTreeOperationsHelper.java
@@ -22,7 +22,6 @@
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.config.GlobalConfig;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.transactions.IResourceFactory;
@@ -83,15 +82,15 @@
protected RecordDescriptor secondaryRecDescForPointMBR = null;
protected SecondaryRTreeOperationsHelper(Dataset dataset, Index index, PhysicalOptimizationConfig physOptConf,
- IPropertiesProvider propertiesProvider, MetadataProvider metadataProvider, ARecordType recType,
+ MetadataProvider metadataProvider, ARecordType recType,
ARecordType metaType, ARecordType enforcedType, ARecordType enforcedMetaType) {
- super(dataset, index, physOptConf, propertiesProvider, metadataProvider, recType, metaType, enforcedType,
+ super(dataset, index, physOptConf, metadataProvider, recType, metaType, enforcedType,
enforcedMetaType);
}
@Override
public JobSpecification buildCreationJobSpec() throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
IIndexDataflowHelperFactory indexDataflowHelperFactory = dataset.getIndexDataflowHelperFactory(
metadataProvider, index, itemType, metaType, mergePolicyFactory, mergePolicyFactoryProperties);
IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
@@ -267,7 +266,7 @@
* 3) Bulk-loading in RTree takes 4 doubles by reading 2 doubles twice and then,
* do the same work as non-point MBR cases.
***************************************************/
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
int[] fieldPermutation = createFieldPermutationForBulkLoadOp(numNestedSecondaryKeyFields);
int numNestedSecondaryKeFieldsConsideringPointMBR =
isPointMBR ? numNestedSecondaryKeyFields / 2 : numNestedSecondaryKeyFields;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryTreeIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryTreeIndexOperationsHelper.java
index 2b025c9..c7af7ba 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryTreeIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryTreeIndexOperationsHelper.java
@@ -19,7 +19,8 @@
package org.apache.asterix.metadata.utils;
-import org.apache.asterix.common.config.IPropertiesProvider;
+import java.util.Map;
+
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
@@ -40,25 +41,17 @@
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
-import java.util.Map;
-
public abstract class SecondaryTreeIndexOperationsHelper extends SecondaryIndexOperationsHelper {
- protected SecondaryTreeIndexOperationsHelper(Dataset dataset,
- Index index,
- PhysicalOptimizationConfig physOptConf,
- IPropertiesProvider propertiesProvider,
- MetadataProvider metadataProvider,
- ARecordType recType, ARecordType metaType,
- ARecordType enforcedType,
+ protected SecondaryTreeIndexOperationsHelper(Dataset dataset, Index index, PhysicalOptimizationConfig physOptConf,
+ MetadataProvider metadataProvider, ARecordType recType, ARecordType metaType, ARecordType enforcedType,
ARecordType enforcedMetaType) {
- super(dataset, index, physOptConf, propertiesProvider, metadataProvider, recType, metaType, enforcedType,
- enforcedMetaType);
+ super(dataset, index, physOptConf, metadataProvider, recType, metaType, enforcedType, enforcedMetaType);
}
@Override
public JobSpecification buildDropJobSpec() throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
metadataProvider.getSplitProviderAndConstraints(dataset, index.getIndexName());
@@ -82,9 +75,9 @@
@Override
public JobSpecification buildCompactJobSpec() throws AlgebricksException {
- JobSpecification spec = RuntimeUtils.createJobSpecification();
- IIndexDataflowHelperFactory indexDataflowHelperFactory = dataset.getIndexDataflowHelperFactory(
- metadataProvider, index, itemType, metaType, mergePolicyFactory, mergePolicyFactoryProperties);
+ JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
+ IIndexDataflowHelperFactory indexDataflowHelperFactory = dataset.getIndexDataflowHelperFactory(metadataProvider,
+ index, itemType, metaType, mergePolicyFactory, mergePolicyFactoryProperties);
LSMTreeIndexCompactOperatorDescriptor compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
metadataProvider.getStorageComponentProvider().getStorageManager(),
metadataProvider.getStorageComponentProvider().getIndexLifecycleManagerProvider(),
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
index dd40b04..5e58802 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationChannel.java
@@ -44,7 +44,6 @@
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.cluster.ClusterPartition;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.config.ReplicationProperties;
import org.apache.asterix.common.context.IndexInfo;
import org.apache.asterix.common.exceptions.ACIDException;
@@ -117,8 +116,8 @@
this.replicationProperties = replicationProperties;
this.appContextProvider = asterixAppRuntimeContextProvider;
this.dsLifecycleManager = asterixAppRuntimeContextProvider.getDatasetLifecycleManager();
- this.localResourceRep = (PersistentLocalResourceRepository) asterixAppRuntimeContextProvider
- .getLocalResourceRepository();
+ this.localResourceRep =
+ (PersistentLocalResourceRepository) asterixAppRuntimeContextProvider.getLocalResourceRepository();
lsmComponentRemoteLSN2LocalLSNMappingTaskQ = new LinkedBlockingQueue<>();
pendingNotificationRemoteLogsQ = new LinkedBlockingQueue<>();
lsmComponentId2PropertiesMap = new ConcurrentHashMap<>();
@@ -148,8 +147,8 @@
try {
serverSocketChannel = ServerSocketChannel.open();
serverSocketChannel.configureBlocking(true);
- InetSocketAddress replicationChannelAddress = new InetSocketAddress(InetAddress.getByName(nodeIP),
- dataPort);
+ InetSocketAddress replicationChannelAddress =
+ new InetSocketAddress(InetAddress.getByName(nodeIP), dataPort);
serverSocketChannel.socket().bind(replicationChannelAddress);
lsmComponentLSNMappingService.start();
replicationNotifier.start();
@@ -176,8 +175,9 @@
if (remainingFile == 0) {
if (lsmCompProp.getOpType() == LSMOperationType.FLUSH && lsmCompProp.getReplicaLSN() != null
&& replicaUniqueLSN2RemoteMapping.containsKey(lsmCompProp.getNodeUniqueLSN())) {
- int remainingIndexes = replicaUniqueLSN2RemoteMapping
- .get(lsmCompProp.getNodeUniqueLSN()).numOfFlushedIndexes.decrementAndGet();
+ int remainingIndexes =
+ replicaUniqueLSN2RemoteMapping.get(lsmCompProp.getNodeUniqueLSN()).numOfFlushedIndexes
+ .decrementAndGet();
if (remainingIndexes == 0) {
/**
* Note: there is a chance that this will never be removed because some
@@ -223,8 +223,8 @@
public void run() {
Thread.currentThread().setName("Replication Thread");
try {
- ReplicationRequestType replicationFunction = ReplicationProtocol.getRequestType(socketChannel,
- inBuffer);
+ ReplicationRequestType replicationFunction =
+ ReplicationProtocol.getRequestType(socketChannel, inBuffer);
while (replicationFunction != ReplicationRequestType.GOODBYE) {
switch (replicationFunction) {
case REPLICATE_LOG:
@@ -288,8 +288,8 @@
Set<Integer> datasetsToForceFlush = new HashSet<>();
for (IndexInfo iInfo : openIndexesInfo) {
if (requestedIndexesToBeFlushed.contains(iInfo.getResourceId())) {
- AbstractLSMIOOperationCallback ioCallback = (AbstractLSMIOOperationCallback) iInfo.getIndex()
- .getIOOperationCallback();
+ AbstractLSMIOOperationCallback ioCallback =
+ (AbstractLSMIOOperationCallback) iInfo.getIndex().getIOOperationCallback();
//if an index has a pending flush, then the request to flush it will succeed.
if (ioCallback.hasPendingFlush()) {
//remove index to indicate that it will be flushed
@@ -380,8 +380,8 @@
List<String> filesList;
Set<Integer> partitionIds = request.getPartitionIds();
Set<String> requesterExistingFiles = request.getExistingFiles();
- Map<Integer, ClusterPartition> clusterPartitions = ((IPropertiesProvider) appContextProvider
- .getAppContext()).getMetadataProperties().getClusterPartitions();
+ Map<Integer, ClusterPartition> clusterPartitions =
+ appContextProvider.getAppContext().getMetadataProperties().getClusterPartitions();
final IReplicationStrategy repStrategy = replicationProperties.getReplicationStrategy();
// Flush replicated datasets to generate the latest LSM components
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
index 447021c..da45e42 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/ReplicationManager.java
@@ -53,7 +53,6 @@
import java.util.stream.Collectors;
import org.apache.asterix.common.cluster.ClusterPartition;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.config.ReplicationProperties;
import org.apache.asterix.common.dataflow.LSMIndexUtil;
import org.apache.asterix.common.replication.IReplicaResourcesManager;
@@ -149,8 +148,8 @@
this.replicaResourcesManager = (ReplicaResourcesManager) remoteResoucesManager;
this.asterixAppRuntimeContextProvider = asterixAppRuntimeContextProvider;
this.logManager = logManager;
- localResourceRepo = (PersistentLocalResourceRepository) asterixAppRuntimeContextProvider
- .getLocalResourceRepository();
+ localResourceRepo =
+ (PersistentLocalResourceRepository) asterixAppRuntimeContextProvider.getLocalResourceRepository();
this.hostIPAddressFirstOctet = replicationProperties.getReplicaIPAddress(nodeId).substring(0, 3);
replicas = new HashMap<>();
replicationJobsQ = new LinkedBlockingQueue<>();
@@ -170,8 +169,8 @@
replicationListenerThreads = Executors.newCachedThreadPool();
replicationJobsProcessor = new ReplicationJobsProccessor();
- Map<String, ClusterPartition[]> nodePartitions = ((IPropertiesProvider) asterixAppRuntimeContextProvider
- .getAppContext()).getMetadataProperties().getNodePartitions();
+ Map<String, ClusterPartition[]> nodePartitions =
+ asterixAppRuntimeContextProvider.getAppContext().getMetadataProperties().getNodePartitions();
replica2PartitionsMap = new HashMap<>(replicaNodes.size());
for (Replica replica : replicaNodes) {
replicas.put(replica.getId(), replica);
@@ -347,8 +346,7 @@
requestBuffer = ReplicationProtocol.writeFileReplicationRequest(requestBuffer,
asterixFileProperties, ReplicationRequestType.REPLICATE_FILE);
- Iterator<Map.Entry<String, SocketChannel>> iterator =
- replicasSockets.entrySet().iterator();
+ Iterator<Map.Entry<String, SocketChannel>> iterator = replicasSockets.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<String, SocketChannel> entry = iterator.next();
//if the remote replica is not interested in this partition, skip it.
@@ -798,8 +796,7 @@
//if got ACKs from all remote replicas, notify pending jobs if any
- if (jobCommitAcks.get(jobId).size() == replicationFactor
- && replicationJobsPendingAcks.containsKey(jobId)) {
+ if (jobCommitAcks.get(jobId).size() == replicationFactor && replicationJobsPendingAcks.containsKey(jobId)) {
ILogRecord pendingLog = replicationJobsPendingAcks.get(jobId);
synchronized (pendingLog) {
pendingLog.notifyAll();
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
index 7441ec7..7c7a050 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/recovery/RemoteRecoveryManager.java
@@ -29,11 +29,10 @@
import java.util.logging.Logger;
import java.util.stream.Collectors;
-import org.apache.asterix.common.api.IAppRuntimeContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.cluster.ClusterPartition;
import org.apache.asterix.common.config.ClusterProperties;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.config.ReplicationProperties;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.replication.IRemoteRecoveryManager;
@@ -48,11 +47,11 @@
private final IReplicationManager replicationManager;
private static final Logger LOGGER = Logger.getLogger(RemoteRecoveryManager.class.getName());
- private final IAppRuntimeContext runtimeContext;
+ private final INcApplicationContext runtimeContext;
private final ReplicationProperties replicationProperties;
private Map<String, Set<String>> failbackRecoveryReplicas;
- public RemoteRecoveryManager(IReplicationManager replicationManager, IAppRuntimeContext runtimeContext,
+ public RemoteRecoveryManager(IReplicationManager replicationManager, INcApplicationContext runtimeContext,
ReplicationProperties replicationProperties) {
this.replicationManager = replicationManager;
this.runtimeContext = runtimeContext;
@@ -156,8 +155,8 @@
replayReplicaPartitionLogs(partitionsToTakeover, false);
//mark these partitions as active in this node
- PersistentLocalResourceRepository resourceRepository = (PersistentLocalResourceRepository) runtimeContext
- .getLocalResourceRepository();
+ PersistentLocalResourceRepository resourceRepository =
+ (PersistentLocalResourceRepository) runtimeContext.getLocalResourceRepository();
for (Integer patitionId : partitions) {
resourceRepository.addActivePartition(patitionId);
}
@@ -166,11 +165,10 @@
@Override
public void startFailbackProcess() {
int maxRecoveryAttempts = replicationProperties.getMaxRemoteRecoveryAttempts();
- PersistentLocalResourceRepository resourceRepository = (PersistentLocalResourceRepository) runtimeContext
- .getLocalResourceRepository();
+ PersistentLocalResourceRepository resourceRepository =
+ (PersistentLocalResourceRepository) runtimeContext.getLocalResourceRepository();
IDatasetLifecycleManager datasetLifeCycleManager = runtimeContext.getDatasetLifecycleManager();
- Map<String, ClusterPartition[]> nodePartitions = runtimeContext.getMetadataProperties()
- .getNodePartitions();
+ Map<String, ClusterPartition[]> nodePartitions = runtimeContext.getMetadataProperties().getNodePartitions();
while (true) {
//start recovery steps
@@ -225,10 +223,9 @@
@Override
public void completeFailbackProcess() throws IOException, InterruptedException {
ILogManager logManager = runtimeContext.getTransactionSubsystem().getLogManager();
- ReplicaResourcesManager replicaResourcesManager = (ReplicaResourcesManager) runtimeContext
- .getReplicaResourcesManager();
- Map<String, ClusterPartition[]> nodePartitions = ((IPropertiesProvider) runtimeContext).getMetadataProperties()
- .getNodePartitions();
+ ReplicaResourcesManager replicaResourcesManager =
+ (ReplicaResourcesManager) runtimeContext.getReplicaResourcesManager();
+ Map<String, ClusterPartition[]> nodePartitions = runtimeContext.getMetadataProperties().getNodePartitions();
/**
* for each lost partition, get the remaining files from replicas
@@ -281,8 +278,8 @@
@Override
public void doRemoteRecoveryPlan(Map<String, Set<Integer>> recoveryPlan) throws HyracksDataException {
int maxRecoveryAttempts = replicationProperties.getMaxRemoteRecoveryAttempts();
- PersistentLocalResourceRepository resourceRepository = (PersistentLocalResourceRepository) runtimeContext
- .getLocalResourceRepository();
+ PersistentLocalResourceRepository resourceRepository =
+ (PersistentLocalResourceRepository) runtimeContext.getLocalResourceRepository();
IDatasetLifecycleManager datasetLifeCycleManager = runtimeContext.getDatasetLifecycleManager();
ILogManager logManager = runtimeContext.getTransactionSubsystem().getLogManager();
while (true) {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java
index b182add..2749b5a 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java
@@ -18,7 +18,7 @@
*/
package org.apache.asterix.runtime.job.listener;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.transactions.DatasetId;
import org.apache.asterix.common.transactions.ITransactionContext;
@@ -51,7 +51,7 @@
@Override
public void jobletFinish(JobStatus jobStatus) {
try {
- ITransactionManager txnManager = ((IAppRuntimeContext) jobletContext.getServiceContext()
+ ITransactionManager txnManager = ((INcApplicationContext) jobletContext.getServiceContext()
.getApplicationContext()).getTransactionSubsystem().getTransactionManager();
ITransactionContext txnContext = txnManager.getTransactionContext(jobId, false);
txnContext.setWriteTxn(transactionalWrite);
@@ -65,7 +65,7 @@
@Override
public void jobletStart() {
try {
- ((IAppRuntimeContext) jobletContext.getServiceContext().getApplicationContext())
+ ((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
.getTransactionSubsystem().getTransactionManager().getTransactionContext(jobId, true);
} catch (ACIDException e) {
throw new Error(e);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java
index c194d64..f41f326 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java
@@ -20,7 +20,7 @@
import java.util.List;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.transactions.DatasetId;
import org.apache.asterix.common.transactions.ITransactionContext;
@@ -54,7 +54,7 @@
public void jobletFinish(JobStatus jobStatus) {
try {
ITransactionManager txnManager =
- ((IAppRuntimeContext) jobletContext.getServiceContext().getApplicationContext())
+ ((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
.getTransactionSubsystem().getTransactionManager();
for (JobId jobId : jobIds) {
ITransactionContext txnContext = txnManager.getTransactionContext(jobId, false);
@@ -71,7 +71,7 @@
public void jobletStart() {
try {
for (JobId jobId : jobIds) {
- ((IAppRuntimeContext) jobletContext.getServiceContext().getApplicationContext())
+ ((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
.getTransactionSubsystem().getTransactionManager().getTransactionContext(jobId, true);
}
} catch (ACIDException e) {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java
index 8eb3663..fe230b4 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReplicaEventMessage.java
@@ -18,16 +18,15 @@
*/
package org.apache.asterix.runtime.message;
-import org.apache.asterix.common.api.IAppRuntimeContext;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.asterix.common.replication.Replica;
import org.apache.asterix.common.replication.ReplicaEvent;
import org.apache.asterix.event.schema.cluster.Node;
import org.apache.hyracks.api.application.IClusterLifecycleListener.ClusterEventType;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-public class ReplicaEventMessage implements IApplicationMessage {
+public class ReplicaEventMessage implements INcAddressedMessage {
private static final long serialVersionUID = 1L;
private final String nodeId;
@@ -53,8 +52,7 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- IAppRuntimeContext appContext = (IAppRuntimeContext) cs.getApplicationContext();
+ public void handle(INcApplicationContext appContext) throws HyracksDataException, InterruptedException {
Node node = new Node();
node.setId(nodeId);
node.setClusterIp(nodeIPAddress);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java
index fc2650e..277c0ba 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdMessage.java
@@ -21,17 +21,16 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.asterix.common.api.IAppRuntimeContext;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
import org.apache.asterix.common.messaging.api.INCMessageBroker;
import org.apache.asterix.common.metadata.MetadataIndexImmutableProperties;
import org.apache.asterix.common.transactions.IResourceIdManager;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
import org.apache.hyracks.control.nc.NodeControllerService;
-public class ReportMaxResourceIdMessage implements IApplicationMessage {
+public class ReportMaxResourceIdMessage implements ICcAddressedMessage {
private static final long serialVersionUID = 1L;
private static final Logger LOGGER = Logger.getLogger(ReportMaxResourceIdMessage.class.getName());
private final long maxResourceId;
@@ -47,15 +46,14 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- IResourceIdManager resourceIdManager =
- AppContextInfo.INSTANCE.getResourceIdManager();
+ public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ IResourceIdManager resourceIdManager = appCtx.getResourceIdManager();
resourceIdManager.report(src, maxResourceId);
}
public static void send(NodeControllerService cs) throws HyracksDataException {
NodeControllerService ncs = cs;
- IAppRuntimeContext appContext = (IAppRuntimeContext) ncs.getApplicationContext();
+ INcApplicationContext appContext = (INcApplicationContext) ncs.getApplicationContext();
long maxResourceId = Math.max(appContext.getLocalResourceRepository().maxId(),
MetadataIndexImmutableProperties.FIRST_AVAILABLE_USER_DATASET_ID);
ReportMaxResourceIdMessage maxResourceIdMsg = new ReportMaxResourceIdMessage(ncs.getId(), maxResourceId);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java
index a1290df..a43376d 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ReportMaxResourceIdRequestMessage.java
@@ -18,17 +18,17 @@
*/
package org.apache.asterix.runtime.message;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
import org.apache.hyracks.control.nc.NodeControllerService;
-public class ReportMaxResourceIdRequestMessage implements IApplicationMessage {
+public class ReportMaxResourceIdRequestMessage implements INcAddressedMessage {
private static final long serialVersionUID = 1L;
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- ReportMaxResourceIdMessage.send((NodeControllerService) cs);
+ public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
+ ReportMaxResourceIdMessage.send((NodeControllerService) appCtx.getServiceContext().getControllerService());
}
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java
index c8aef37..194fd59 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestMessage.java
@@ -20,15 +20,14 @@
import java.util.Set;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.messaging.api.ICCMessageBroker;
+import org.apache.asterix.common.messaging.api.ICcAddressedMessage;
import org.apache.asterix.common.transactions.IResourceIdManager;
-import org.apache.asterix.runtime.utils.AppContextInfo;
import org.apache.asterix.runtime.utils.ClusterStateManager;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-public class ResourceIdRequestMessage implements IApplicationMessage {
+public class ResourceIdRequestMessage implements ICcAddressedMessage {
private static final long serialVersionUID = 1L;
private final String src;
@@ -37,17 +36,15 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
+ public void handle(ICcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
try {
- ICCMessageBroker broker =
- (ICCMessageBroker) AppContextInfo.INSTANCE.getCCServiceContext().getMessageBroker();
+ ICCMessageBroker broker = (ICCMessageBroker) appCtx.getServiceContext().getMessageBroker();
ResourceIdRequestResponseMessage reponse = new ResourceIdRequestResponseMessage();
if (!ClusterStateManager.INSTANCE.isClusterActive()) {
reponse.setResourceId(-1);
reponse.setException(new Exception("Cannot generate global resource id when cluster is not active."));
} else {
- IResourceIdManager resourceIdManager =
- AppContextInfo.INSTANCE.getResourceIdManager();
+ IResourceIdManager resourceIdManager = appCtx.getResourceIdManager();
reponse.setResourceId(resourceIdManager.createResourceId());
if (reponse.getResourceId() < 0) {
reponse.setException(new Exception("One or more nodes has not reported max resource id."));
@@ -60,8 +57,7 @@
}
}
- private void requestMaxResourceID(IResourceIdManager resourceIdManager, ICCMessageBroker broker)
- throws Exception {
+ private void requestMaxResourceID(IResourceIdManager resourceIdManager, ICCMessageBroker broker) throws Exception {
Set<String> getParticipantNodes = ClusterStateManager.INSTANCE.getParticipantNodes();
ReportMaxResourceIdRequestMessage msg = new ReportMaxResourceIdRequestMessage();
for (String nodeId : getParticipantNodes) {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestResponseMessage.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestResponseMessage.java
index 1106da9..6a9ed35 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestResponseMessage.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/message/ResourceIdRequestResponseMessage.java
@@ -18,13 +18,12 @@
*/
package org.apache.asterix.runtime.message;
-import org.apache.asterix.common.api.IAppRuntimeContext;
-import org.apache.asterix.common.messaging.api.IApplicationMessage;
+import org.apache.asterix.common.api.INcApplicationContext;
+import org.apache.asterix.common.messaging.api.INcAddressedMessage;
import org.apache.asterix.runtime.transaction.GlobalResourceIdFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.service.IControllerService;
-public class ResourceIdRequestResponseMessage implements IApplicationMessage {
+public class ResourceIdRequestResponseMessage implements INcAddressedMessage {
private static final long serialVersionUID = 1L;
private long resourceId;
@@ -47,8 +46,7 @@
}
@Override
- public void handle(IControllerService cs) throws HyracksDataException, InterruptedException {
- IAppRuntimeContext appCtx = (IAppRuntimeContext) cs.getApplicationContext();
+ public void handle(INcApplicationContext appCtx) throws HyracksDataException, InterruptedException {
((GlobalResourceIdFactory) appCtx.getResourceIdFactory()).addNewIds(this);
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java
index d8c67bf..ed1a247 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/LSMPrimaryUpsertOperatorNodePushable.java
@@ -22,7 +22,7 @@
import java.io.IOException;
import java.nio.ByteBuffer;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.dataflow.LSMIndexUtil;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.exceptions.AsterixException;
@@ -157,8 +157,8 @@
cursor = indexAccessor.createSearchCursor(false);
frameTuple = new FrameTupleReference();
- IAppRuntimeContext appCtx =
- (IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+ INcApplicationContext appCtx =
+ (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
LSMIndexUtil.checkAndSetFirstLSN((AbstractLSMIndex) index,
appCtx.getTransactionSubsystem().getLogManager());
frameOpCallback =
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/std/FlushDatasetOperatorDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/std/FlushDatasetOperatorDescriptor.java
index aca6455..85e92c3 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/std/FlushDatasetOperatorDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/operators/std/FlushDatasetOperatorDescriptor.java
@@ -20,7 +20,7 @@
import java.nio.ByteBuffer;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.transactions.DatasetId;
@@ -72,7 +72,7 @@
@Override
public void close() throws HyracksDataException {
try {
- IAppRuntimeContext appCtx = (IAppRuntimeContext) ctx.getJobletContext()
+ INcApplicationContext appCtx = (INcApplicationContext) ctx.getJobletContext()
.getServiceContext().getApplicationContext();
IDatasetLifecycleManager datasetLifeCycleManager = appCtx.getDatasetLifecycleManager();
ILockManager lockManager = appCtx.getTransactionSubsystem().getLockManager();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/AppContextInfo.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/CcApplicationContext.java
similarity index 74%
rename from asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/AppContextInfo.java
rename to asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/CcApplicationContext.java
index ad510e4..8608d68 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/AppContextInfo.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/CcApplicationContext.java
@@ -20,7 +20,6 @@
import java.io.IOException;
import java.util.function.Supplier;
-import java.util.logging.Logger;
import org.apache.asterix.common.cluster.IGlobalRecoveryManager;
import org.apache.asterix.common.config.ActiveProperties;
@@ -28,7 +27,6 @@
import org.apache.asterix.common.config.CompilerProperties;
import org.apache.asterix.common.config.ExtensionProperties;
import org.apache.asterix.common.config.ExternalProperties;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.config.MessagingProperties;
import org.apache.asterix.common.config.MetadataProperties;
import org.apache.asterix.common.config.NodeProperties;
@@ -36,7 +34,7 @@
import org.apache.asterix.common.config.ReplicationProperties;
import org.apache.asterix.common.config.StorageProperties;
import org.apache.asterix.common.config.TransactionProperties;
-import org.apache.asterix.common.dataflow.IApplicationContextInfo;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.library.ILibraryManager;
import org.apache.asterix.common.metadata.IMetadataBootstrap;
@@ -44,6 +42,7 @@
import org.apache.asterix.common.transactions.IResourceIdManager;
import org.apache.hyracks.api.application.ICCServiceContext;
import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.job.IJobLifecycleListener;
import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
import org.apache.hyracks.storage.common.IStorageManager;
@@ -52,9 +51,8 @@
* instances that are accessed from the NCs. In addition an instance of ICCApplicationContext
* is stored for access by the CC.
*/
-public class AppContextInfo implements IApplicationContextInfo, IPropertiesProvider {
+public class CcApplicationContext implements ICcApplicationContext {
- public static final AppContextInfo INSTANCE = new AppContextInfo();
private ICCServiceContext ccServiceCtx;
private IGlobalRecoveryManager globalRecoveryManager;
private ILibraryManager libraryManager;
@@ -73,51 +71,41 @@
private Supplier<IMetadataBootstrap> metadataBootstrapSupplier;
private IHyracksClientConnection hcc;
private Object extensionManager;
- private volatile boolean initialized = false;
private IFaultToleranceStrategy ftStrategy;
+ private IJobLifecycleListener activeLifeCycleListener;
- private AppContextInfo() {
- }
-
- public static synchronized void initialize(ICCServiceContext ccServiceCtx, IHyracksClientConnection hcc,
+ public CcApplicationContext(ICCServiceContext ccServiceCtx, IHyracksClientConnection hcc,
ILibraryManager libraryManager, IResourceIdManager resourceIdManager,
Supplier<IMetadataBootstrap> metadataBootstrapSupplier, IGlobalRecoveryManager globalRecoveryManager,
- IFaultToleranceStrategy ftStrategy)
+ IFaultToleranceStrategy ftStrategy, IJobLifecycleListener activeLifeCycleListener)
throws AsterixException, IOException {
- if (INSTANCE.initialized) {
- throw new AsterixException(AppContextInfo.class.getSimpleName() + " has been initialized already");
- }
- INSTANCE.initialized = true;
- INSTANCE.ccServiceCtx = ccServiceCtx;
- INSTANCE.hcc = hcc;
- INSTANCE.libraryManager = libraryManager;
- INSTANCE.resourceIdManager = resourceIdManager;
+ this.ccServiceCtx = ccServiceCtx;
+ this.hcc = hcc;
+ this.libraryManager = libraryManager;
+ this.resourceIdManager = resourceIdManager;
+ this.activeLifeCycleListener = activeLifeCycleListener;
// Determine whether to use old-style asterix-configuration.xml or new-style configuration.
// QQQ strip this out eventually
PropertiesAccessor propertiesAccessor = PropertiesAccessor.getInstance(ccServiceCtx.getAppConfig());
- INSTANCE.compilerProperties = new CompilerProperties(propertiesAccessor);
- INSTANCE.externalProperties = new ExternalProperties(propertiesAccessor);
- INSTANCE.metadataProperties = new MetadataProperties(propertiesAccessor);
- INSTANCE.storageProperties = new StorageProperties(propertiesAccessor);
- INSTANCE.txnProperties = new TransactionProperties(propertiesAccessor);
- INSTANCE.activeProperties = new ActiveProperties(propertiesAccessor);
- INSTANCE.extensionProperties = new ExtensionProperties(propertiesAccessor);
- INSTANCE.replicationProperties = new ReplicationProperties(propertiesAccessor);
- INSTANCE.ftStrategy = ftStrategy;
- INSTANCE.hcc = hcc;
- INSTANCE.buildProperties = new BuildProperties(propertiesAccessor);
- INSTANCE.messagingProperties = new MessagingProperties(propertiesAccessor);
- INSTANCE.nodeProperties = new NodeProperties(propertiesAccessor);
- INSTANCE.metadataBootstrapSupplier = metadataBootstrapSupplier;
- INSTANCE.globalRecoveryManager = globalRecoveryManager;
- }
-
- public boolean initialized() {
- return initialized;
+ compilerProperties = new CompilerProperties(propertiesAccessor);
+ externalProperties = new ExternalProperties(propertiesAccessor);
+ metadataProperties = new MetadataProperties(propertiesAccessor);
+ storageProperties = new StorageProperties(propertiesAccessor);
+ txnProperties = new TransactionProperties(propertiesAccessor);
+ activeProperties = new ActiveProperties(propertiesAccessor);
+ extensionProperties = new ExtensionProperties(propertiesAccessor);
+ replicationProperties = new ReplicationProperties(propertiesAccessor);
+ this.ftStrategy = ftStrategy;
+ this.hcc = hcc;
+ this.buildProperties = new BuildProperties(propertiesAccessor);
+ this.messagingProperties = new MessagingProperties(propertiesAccessor);
+ this.nodeProperties = new NodeProperties(propertiesAccessor);
+ this.metadataBootstrapSupplier = metadataBootstrapSupplier;
+ this.globalRecoveryManager = globalRecoveryManager;
}
@Override
- public ICCServiceContext getCCServiceContext() {
+ public ICCServiceContext getServiceContext() {
return ccServiceCtx;
}
@@ -156,6 +144,7 @@
return buildProperties;
}
+ @Override
public IHyracksClientConnection getHcc() {
return hcc;
}
@@ -207,6 +196,7 @@
return nodeProperties;
}
+ @Override
public IResourceIdManager getResourceIdManager() {
return resourceIdManager;
}
@@ -218,4 +208,9 @@
public IFaultToleranceStrategy getFaultToleranceStrategy() {
return ftStrategy;
}
+
+ @Override
+ public IJobLifecycleListener getActiveLifecycleListener() {
+ return activeLifeCycleListener;
+ }
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
index 6bfbf77..2d8a04d 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
@@ -76,17 +76,19 @@
private boolean metadataNodeActive = false;
private Set<String> failedNodes = new HashSet<>();
private IFaultToleranceStrategy ftStrategy;
+ private CcApplicationContext appCtx;
private ClusterStateManager() {
cluster = ClusterProperties.INSTANCE.getCluster();
- // if this is the CC process
- if (AppContextInfo.INSTANCE.initialized() && AppContextInfo.INSTANCE.getCCServiceContext() != null) {
- node2PartitionsMap = AppContextInfo.INSTANCE.getMetadataProperties().getNodePartitions();
- clusterPartitions = AppContextInfo.INSTANCE.getMetadataProperties().getClusterPartitions();
- currentMetadataNode = AppContextInfo.INSTANCE.getMetadataProperties().getMetadataNodeName();
- ftStrategy = AppContextInfo.INSTANCE.getFaultToleranceStrategy();
- ftStrategy.bindTo(this);
- }
+ }
+
+ public void setCcAppCtx(CcApplicationContext appCtx) {
+ this.appCtx = appCtx;
+ node2PartitionsMap = appCtx.getMetadataProperties().getNodePartitions();
+ clusterPartitions = appCtx.getMetadataProperties().getClusterPartitions();
+ currentMetadataNode = appCtx.getMetadataProperties().getMetadataNodeName();
+ ftStrategy = appCtx.getFaultToleranceStrategy();
+ ftStrategy.bindTo(this);
}
public synchronized void removeNCConfiguration(String nodeId) throws HyracksException {
@@ -162,12 +164,12 @@
// if all storage partitions are active as well as the metadata node, then the cluster is active
if (metadataNodeActive) {
- AppContextInfo.INSTANCE.getMetadataBootstrap().init();
+ appCtx.getMetadataBootstrap().init();
setState(ClusterState.ACTIVE);
LOGGER.info("Cluster is now " + state);
notifyAll();
// start global recovery
- AppContextInfo.INSTANCE.getGlobalRecoveryManager().startGlobalRecovery();
+ appCtx.getGlobalRecoveryManager().startGlobalRecovery(appCtx);
}
}
@@ -210,7 +212,7 @@
}
return new String[0];
}
- return (String [])ncConfig.get(NCConfig.Option.IODEVICES);
+ return (String[]) ncConfig.get(NCConfig.Option.IODEVICES);
}
@Override
@@ -245,8 +247,8 @@
clusterActiveLocations.add(p.getActiveNodeId());
}
}
- clusterPartitionConstraint = new AlgebricksAbsolutePartitionConstraint(
- clusterActiveLocations.toArray(new String[] {}));
+ clusterPartitionConstraint =
+ new AlgebricksAbsolutePartitionConstraint(clusterActiveLocations.toArray(new String[] {}));
}
public boolean isGlobalRecoveryCompleted() {
@@ -265,8 +267,8 @@
return state == ClusterState.ACTIVE;
}
- public static int getNumberOfNodes() {
- return AppContextInfo.INSTANCE.getMetadataProperties().getNodeNames().size();
+ public int getNumberOfNodes() {
+ return appCtx.getMetadataProperties().getNodeNames().size();
}
@Override
@@ -294,13 +296,13 @@
return metadataNodeActive;
}
- public synchronized ObjectNode getClusterStateDescription() {
+ public synchronized ObjectNode getClusterStateDescription() {
ObjectMapper om = new ObjectMapper();
ObjectNode stateDescription = om.createObjectNode();
stateDescription.put("state", state.name());
stateDescription.put("metadata_node", currentMetadataNode);
ArrayNode ncs = om.createArrayNode();
- stateDescription.set("ncs",ncs);
+ stateDescription.set("ncs", ncs);
for (Map.Entry<String, ClusterPartition[]> entry : node2PartitionsMap.entrySet()) {
ObjectNode nodeJSON = om.createObjectNode();
nodeJSON.put("node_id", entry.getKey());
@@ -318,9 +320,7 @@
}
}
nodeJSON.put("state", failedNodes.contains(entry.getKey()) ? "FAILED"
- : allActive ? "ACTIVE"
- : anyActive ? "PARTIALLY_ACTIVE"
- : "INACTIVE");
+ : allActive ? "ACTIVE" : anyActive ? "PARTIALLY_ACTIVE" : "INACTIVE");
nodeJSON.putPOJO("partitions", partitions);
ncs.add(nodeJSON);
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeComponentsProvider.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeComponentsProvider.java
index 387e949..353a45c 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeComponentsProvider.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeComponentsProvider.java
@@ -18,7 +18,7 @@
*/
package org.apache.asterix.runtime.utils;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
@@ -42,37 +42,37 @@
@Override
public ILSMIOOperationScheduler getIOScheduler(IHyracksTaskContext ctx) {
- return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+ return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
.getLSMIOScheduler();
}
@Override
public IBufferCache getBufferCache(IHyracksTaskContext ctx) {
- return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+ return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
.getBufferCache();
}
@Override
public IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx) {
- return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+ return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
.getFileMapManager();
}
@Override
public ILocalResourceRepository getLocalResourceRepository(IHyracksTaskContext ctx) {
- return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+ return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
.getLocalResourceRepository();
}
@Override
public IDatasetLifecycleManager getLifecycleManager(IHyracksTaskContext ctx) {
- return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+ return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
.getDatasetLifecycleManager();
}
@Override
public IResourceIdFactory getResourceIdFactory(IHyracksTaskContext ctx) {
- return ((IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
+ return ((INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext())
.getResourceIdFactory();
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeUtils.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeUtils.java
index a2a191d..85e93b8 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeUtils.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/RuntimeUtils.java
@@ -27,6 +27,7 @@
import java.util.Set;
import org.apache.asterix.common.config.CompilerProperties;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.control.cc.ClusterControllerService;
@@ -37,13 +38,14 @@
private RuntimeUtils() {
}
- public static Set<String> getNodeControllersOnIP(InetAddress ipAddress) throws HyracksDataException {
- Map<InetAddress, Set<String>> nodeControllerInfo = getNodeControllerMap();
+ public static Set<String> getNodeControllersOnIP(ICcApplicationContext appCtx, InetAddress ipAddress)
+ throws HyracksDataException {
+ Map<InetAddress, Set<String>> nodeControllerInfo = getNodeControllerMap(appCtx);
return nodeControllerInfo.get(ipAddress);
}
- public static List<String> getAllNodeControllers() throws HyracksDataException {
- Collection<Set<String>> nodeControllersCollection = getNodeControllerMap().values();
+ public static List<String> getAllNodeControllers(ICcApplicationContext appCtx) throws HyracksDataException {
+ Collection<Set<String>> nodeControllersCollection = getNodeControllerMap(appCtx).values();
List<String> nodeControllers = new ArrayList<>();
for (Set<String> ncCollection : nodeControllersCollection) {
nodeControllers.addAll(ncCollection);
@@ -51,21 +53,21 @@
return nodeControllers;
}
- public static Map<InetAddress, Set<String>> getNodeControllerMap() throws HyracksDataException {
+ public static Map<InetAddress, Set<String>> getNodeControllerMap(ICcApplicationContext appCtx)
+ throws HyracksDataException {
Map<InetAddress, Set<String>> map = new HashMap<>();
- AppContextInfo.INSTANCE.getCCServiceContext().getCCContext().getIPAddressNodeMap(map);
+ appCtx.getServiceContext().getCCContext().getIPAddressNodeMap(map);
return map;
}
- public static void getNodeControllerMap(Map<InetAddress, Set<String>> map) {
- ClusterControllerService ccs =
- (ClusterControllerService) AppContextInfo.INSTANCE.getCCServiceContext().getControllerService();
+ public static void getNodeControllerMap(ICcApplicationContext appCtx, Map<InetAddress, Set<String>> map) {
+ ClusterControllerService ccs = (ClusterControllerService) appCtx.getServiceContext().getControllerService();
INodeManager nodeManager = ccs.getNodeManager();
map.putAll(nodeManager.getIpAddressNodeNameMap());
}
- public static JobSpecification createJobSpecification() {
- CompilerProperties compilerProperties = AppContextInfo.INSTANCE.getCompilerProperties();
+ public static JobSpecification createJobSpecification(ICcApplicationContext appCtx) {
+ CompilerProperties compilerProperties = appCtx.getCompilerProperties();
int frameSize = compilerProperties.getFrameSize();
return new JobSpecification(frameSize);
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexOperationTrackerFactory.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexOperationTrackerFactory.java
index c553bc0..5e7cf97 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexOperationTrackerFactory.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/PrimaryIndexOperationTrackerFactory.java
@@ -19,7 +19,7 @@
package org.apache.asterix.transaction.management.opcallbacks;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
@@ -38,7 +38,7 @@
@Override
public ILSMOperationTracker getOperationTracker(INCServiceContext ctx) {
IDatasetLifecycleManager dslcManager =
- ((IAppRuntimeContext) ctx.getApplicationContext()).getDatasetLifecycleManager();
+ ((INcApplicationContext) ctx.getApplicationContext()).getDatasetLifecycleManager();
return dslcManager.getOperationTracker(datasetID);
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexOperationTrackerFactory.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexOperationTrackerFactory.java
index 4832acd..febcac2 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexOperationTrackerFactory.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexOperationTrackerFactory.java
@@ -18,7 +18,7 @@
*/
package org.apache.asterix.transaction.management.opcallbacks;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.context.BaseOperationTracker;
import org.apache.hyracks.api.application.INCServiceContext;
@@ -38,7 +38,7 @@
@Override
public ILSMOperationTracker getOperationTracker(INCServiceContext ctx) {
IDatasetLifecycleManager dslcManager =
- ((IAppRuntimeContext) ctx.getApplicationContext()).getDatasetLifecycleManager();
+ ((INcApplicationContext) ctx.getApplicationContext()).getDatasetLifecycleManager();
return new BaseOperationTracker(datasetID, dslcManager.getDatasetInfo(datasetID));
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java
index d10a9a9..0fca60d 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeLocalResourceMetadata.java
@@ -20,7 +20,7 @@
import java.util.Map;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
@@ -52,7 +52,7 @@
@Override
public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
throws HyracksDataException {
- IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+ INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
IIOManager ioManager = appCtx.getIOManager();
FileReference file = ioManager.resolve(resource.getPath());
return LSMBTreeUtil.createExternalBTree(ioManager, file, appCtx.getBufferCache(),
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java
index fd7ff0f..aa82113 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalBTreeWithBuddyLocalResourceMetadata.java
@@ -20,7 +20,7 @@
import java.util.Map;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.transactions.Resource;
import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -67,7 +67,7 @@
@Override
public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
throws HyracksDataException {
- IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+ INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
IIOManager ioManager = serviceCtx.getIoManager();
FileReference file = ioManager.resolve(resource.getPath());
return LSMBTreeUtil.createExternalBTreeWithBuddy(ioManager, file, appCtx.getBufferCache(),
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
index b90fab3..75516ff 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/ExternalRTreeLocalResourceMetadata.java
@@ -20,7 +20,7 @@
import java.util.Map;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
@@ -60,7 +60,7 @@
@Override
public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
throws HyracksDataException {
- IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+ INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
IIOManager ioManager = appCtx.getIOManager();
FileReference file = ioManager.resolve(resource.getPath());
return LSMRTreeUtils.createExternalRTree(ioManager, file, appCtx.getBufferCache(), appCtx.getFileMapManager(),
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
index 0776567..4bb30a8 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMBTreeLocalResourceMetadata.java
@@ -20,7 +20,7 @@
import java.util.Map;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.transactions.Resource;
import org.apache.hyracks.api.application.INCServiceContext;
@@ -77,7 +77,7 @@
@Override
public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
throws HyracksDataException {
- IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+ INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
IIOManager ioManager = appCtx.getIOManager();
FileReference file = ioManager.resolve(resource.getPath());
int ioDeviceNum = Resource.getIoDeviceNum(ioManager, file.getDeviceHandle());
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
index b7408aa..d956647 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMInvertedIndexLocalResourceMetadata.java
@@ -21,7 +21,7 @@
import java.util.List;
import java.util.Map;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.transactions.Resource;
import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -82,7 +82,7 @@
@Override
public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
throws HyracksDataException {
- IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+ INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
IIOManager ioManager = appCtx.getIOManager();
FileReference file = ioManager.resolve(resource.getPath());
int ioDeviceNum = Resource.getIoDeviceNum(ioManager, file.getDeviceHandle());
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
index 127a997..f3ac6c1 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/LSMRTreeLocalResourceMetadata.java
@@ -21,7 +21,7 @@
import java.util.List;
import java.util.Map;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.transactions.Resource;
import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
@@ -83,7 +83,7 @@
@Override
public ILSMIndex createIndexInstance(INCServiceContext serviceCtx, LocalResource resource)
throws HyracksDataException {
- IAppRuntimeContext appCtx = (IAppRuntimeContext) serviceCtx.getApplicationContext();
+ INcApplicationContext appCtx = (INcApplicationContext) serviceCtx.getApplicationContext();
IIOManager ioManager = appCtx.getIOManager();
FileReference file = ioManager.resolve(resource.getPath());
int ioDeviceNum = Resource.getIoDeviceNum(ioManager, file.getDeviceHandle());
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/runtime/CommitRuntime.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/runtime/CommitRuntime.java
index b114527..1e22458 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/runtime/CommitRuntime.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/runtime/CommitRuntime.java
@@ -21,7 +21,7 @@
import java.nio.ByteBuffer;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.exceptions.ACIDException;
import org.apache.asterix.common.transactions.ILogManager;
import org.apache.asterix.common.transactions.ILogMarkerCallback;
@@ -65,8 +65,8 @@
public CommitRuntime(IHyracksTaskContext ctx, JobId jobId, int datasetId, int[] primaryKeyFields,
boolean isTemporaryDatasetWriteJob, boolean isWriteTransaction, int resourcePartition, boolean isSink) {
this.ctx = ctx;
- IAppRuntimeContext appCtx =
- (IAppRuntimeContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
+ INcApplicationContext appCtx =
+ (INcApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
this.transactionManager = appCtx.getTransactionSubsystem().getTransactionManager();
this.logMgr = appCtx.getTransactionSubsystem().getLogManager();
this.jobId = jobId;
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java
index 6fdee33..6ce543b 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/ReplicationCheckpointManager.java
@@ -23,9 +23,9 @@
import java.util.Set;
import java.util.logging.Logger;
+import org.apache.asterix.common.api.IApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.cluster.ClusterPartition;
-import org.apache.asterix.common.config.IPropertiesProvider;
import org.apache.asterix.common.config.MetadataProperties;
import org.apache.asterix.common.replication.IReplicaResourcesManager;
import org.apache.asterix.common.replication.IReplicationManager;
@@ -54,8 +54,8 @@
@Override
public synchronized void doSharpCheckpoint() throws HyracksDataException {
LOGGER.info("Starting sharp checkpoint...");
- final IDatasetLifecycleManager datasetLifecycleManager = txnSubsystem.getAsterixAppRuntimeContextProvider()
- .getDatasetLifecycleManager();
+ final IDatasetLifecycleManager datasetLifecycleManager =
+ txnSubsystem.getAsterixAppRuntimeContextProvider().getDatasetLifecycleManager();
datasetLifecycleManager.flushAllDatasets();
long minFirstLSN;
// If shutting down, need to check if we need to keep any remote logs for dead replicas
@@ -99,12 +99,12 @@
boolean checkpointSucceeded = minFirstLSN >= checkpointTargetLSN;
if (!checkpointSucceeded) {
// Flush datasets with indexes behind target checkpoint LSN
- final IDatasetLifecycleManager datasetLifecycleManager = txnSubsystem.getAsterixAppRuntimeContextProvider()
- .getDatasetLifecycleManager();
+ final IDatasetLifecycleManager datasetLifecycleManager =
+ txnSubsystem.getAsterixAppRuntimeContextProvider().getDatasetLifecycleManager();
datasetLifecycleManager.scheduleAsyncFlushForLaggingDatasets(checkpointTargetLSN);
// Request remote replicas to flush lagging indexes
- final IReplicationManager replicationManager = txnSubsystem.getAsterixAppRuntimeContextProvider()
- .getAppContext().getReplicationManager();
+ final IReplicationManager replicationManager =
+ txnSubsystem.getAsterixAppRuntimeContextProvider().getAppContext().getReplicationManager();
try {
replicationManager.requestFlushLaggingReplicaIndexes(checkpointTargetLSN);
} catch (IOException e) {
@@ -120,14 +120,14 @@
}
private long getDeadReplicasMinFirstLSN(Set<String> deadReplicaIds) {
- final IReplicaResourcesManager remoteResourcesManager = txnSubsystem.getAsterixAppRuntimeContextProvider()
- .getAppContext().getReplicaResourcesManager();
- final IPropertiesProvider propertiesProvider = (IPropertiesProvider) txnSubsystem
- .getAsterixAppRuntimeContextProvider().getAppContext();
+ final IReplicaResourcesManager remoteResourcesManager =
+ txnSubsystem.getAsterixAppRuntimeContextProvider().getAppContext().getReplicaResourcesManager();
+ final IApplicationContext propertiesProvider =
+ txnSubsystem.getAsterixAppRuntimeContextProvider().getAppContext();
final MetadataProperties metadataProperties = propertiesProvider.getMetadataProperties();
final PersistentLocalResourceRepository localResourceRepository =
- (PersistentLocalResourceRepository) txnSubsystem
- .getAsterixAppRuntimeContextProvider().getLocalResourceRepository();
+ (PersistentLocalResourceRepository) txnSubsystem.getAsterixAppRuntimeContextProvider()
+ .getLocalResourceRepository();
// Get partitions of the dead replicas that are not active on this node
final Set<Integer> deadReplicasPartitions = new HashSet<>();
for (String deadReplicaId : deadReplicaIds) {
diff --git a/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/TestRuntimeContextProvider.java b/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/TestRuntimeContextProvider.java
index 5b8e5a4..d800cc7 100644
--- a/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/TestRuntimeContextProvider.java
+++ b/asterixdb/asterix-transactions/src/test/java/org/apache/asterix/transaction/management/service/locking/TestRuntimeContextProvider.java
@@ -23,7 +23,7 @@
import java.util.concurrent.Executors;
import org.apache.asterix.common.api.ThreadExecutor;
-import org.apache.asterix.common.api.IAppRuntimeContext;
+import org.apache.asterix.common.api.INcApplicationContext;
import org.apache.asterix.common.api.IDatasetLifecycleManager;
import org.apache.asterix.common.transactions.IAppRuntimeContextProvider;
import org.apache.asterix.common.transactions.ITransactionSubsystem;
@@ -90,7 +90,7 @@
}
@Override
- public IAppRuntimeContext getAppContext() {
+ public INcApplicationContext getAppContext() {
throw new UnsupportedOperationException();
}
}