Redeploy channels and procedures during recovery

- Use the GlobalRecoveryManager extension to redeploy channels/procedures
- Restart execution of channels during recovery
- Some code cleanup
- Added recovery test
Change-Id: I6897ccf9cddb9ec8d10256e252ee893afe6db145
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java b/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
index d422663..0467f6e 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/BADConstants.java
@@ -47,7 +47,7 @@
String FIELD_NAME_ARITY = "Arity";
String FIELD_NAME_DEPENDENCIES = "Dependencies";
String FIELD_NAME_PARAMS = "Params";
- String FIELD_NAME_RETURN_TYPE = "ReturnType";
+ String FIELD_NAME_TYPE = "Type";
String FIELD_NAME_DEFINITION = "Definition";
String FIELD_NAME_LANGUAGE = "Language";
String FIELD_NAME_BODY = "Body";
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/BADJobService.java b/asterix-bad/src/main/java/org/apache/asterix/bad/BADJobService.java
index e587072..69145d9 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/BADJobService.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/BADJobService.java
@@ -21,6 +21,7 @@
import java.io.StringReader;
import java.time.Instant;
import java.util.Date;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
@@ -51,6 +52,7 @@
import org.apache.asterix.translator.IStatementExecutor;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.dataset.IHyracksDataset;
+import org.apache.hyracks.api.dataset.ResultSetId;
import org.apache.hyracks.api.job.DeployedJobSpecId;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobSpecification;
@@ -67,6 +69,20 @@
private static final long millisecondTimeout = BADConstants.EXECUTOR_TIMEOUT * 1000;
+ public static void setupExecutorJob(EntityId entityId, JobSpecification channeljobSpec,
+ IHyracksClientConnection hcc, DeployedJobSpecEventListener listener, ITxnIdFactory txnIdFactory,
+ String duration) throws Exception {
+ if (channeljobSpec != null) {
+ channeljobSpec.setProperty(ActiveNotificationHandler.ACTIVE_ENTITY_PROPERTY_NAME, entityId);
+ DeployedJobSpecId deployedId = hcc.deployJobSpec(channeljobSpec);
+ ScheduledExecutorService ses = startRepetitiveDeployedJobSpec(deployedId, hcc, findPeriod(duration),
+ new HashMap<>(), entityId, txnIdFactory, listener);
+ listener.setDeployedJobSpecId(deployedId);
+ listener.setExecutorService(ses);
+ }
+
+ }
+
//Starts running a deployed job specification periodically with an interval of "period" seconds
public static ScheduledExecutorService startRepetitiveDeployedJobSpec(DeployedJobSpecId distributedId,
IHyracksClientConnection hcc, long period, Map<byte[], byte[]> jobParameters, EntityId entityId,
@@ -93,7 +109,8 @@
Map<byte[], byte[]> jobParameters, long period, EntityId entityId, ITxnIdFactory txnIdFactory,
DeployedJobSpecEventListener listener) throws Exception {
long executionMilliseconds =
- runDeployedJobSpec(distributedId, hcc, jobParameters, entityId, txnIdFactory, null, listener, null);
+ runDeployedJobSpec(distributedId, hcc, null, jobParameters, entityId, txnIdFactory, null, listener,
+ null);
if (executionMilliseconds > period) {
LOGGER.log(Level.SEVERE,
"Periodic job for " + entityId.getExtensionName() + " " + entityId.getDataverse() + "."
@@ -106,7 +123,7 @@
}
public static long runDeployedJobSpec(DeployedJobSpecId distributedId, IHyracksClientConnection hcc,
- Map<byte[], byte[]> jobParameters, EntityId entityId, ITxnIdFactory txnIdFactory,
+ IHyracksDataset hdc, Map<byte[], byte[]> jobParameters, EntityId entityId, ITxnIdFactory txnIdFactory,
ICcApplicationContext appCtx, DeployedJobSpecEventListener listener, QueryTranslator statementExecutor)
throws Exception {
listener.waitWhileAtState(ActivityState.SUSPENDED);
@@ -122,7 +139,7 @@
long executionMilliseconds = Instant.now().toEpochMilli() - startTime;
if (listener.getType() == DeployedJobSpecEventListener.PrecompiledType.QUERY) {
- ResultReader resultReader = new ResultReader(listener.getResultDataset(), jobId, listener.getResultId());
+ ResultReader resultReader = new ResultReader(hdc, jobId, new ResultSetId(0));
ResultUtil.printResults(appCtx, resultReader, statementExecutor.getSessionOutput(),
new IStatementExecutor.Stats(), null);
@@ -189,7 +206,7 @@
public static void redeployJobSpec(EntityId entityId, String queryBodyString, MetadataProvider metadataProvider,
BADStatementExecutor badStatementExecutor, IHyracksClientConnection hcc,
- IRequestParameters requestParameters) throws Exception {
+ IRequestParameters requestParameters, boolean useNewId) throws Exception {
ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
ActiveNotificationHandler activeEventHandler =
@@ -217,11 +234,10 @@
}
} else {
//Procedures
- metadataProvider.setResultSetId(listener.getResultId());
- final IStatementExecutor.ResultDelivery resultDelivery =
- requestParameters.getResultProperties().getDelivery();
- final IHyracksDataset hdc = requestParameters.getHyracksDataset();
- final IStatementExecutor.Stats stats = requestParameters.getStats();
+ metadataProvider.setResultSetId(new ResultSetId(0));
+ IStatementExecutor.ResultDelivery resultDelivery = requestParameters.getResultProperties().getDelivery();
+ IHyracksDataset hdc = requestParameters.getHyracksDataset();
+ IStatementExecutor.Stats stats = requestParameters.getStats();
boolean resultsAsync = resultDelivery == IStatementExecutor.ResultDelivery.ASYNC
|| resultDelivery == IStatementExecutor.ResultDelivery.DEFERRED;
metadataProvider.setResultAsyncMode(resultsAsync);
@@ -230,7 +246,12 @@
jobSpec = compileProcedureJob(badStatementExecutor, metadataProvider, hcc, hdc, stats, fStatements.get(1));
}
- hcc.redeployJobSpec(listener.getDeployedJobSpecId(), jobSpec);
+ if (useNewId) {
+ DeployedJobSpecId id = hcc.deployJobSpec(jobSpec);
+ listener.setDeployedJobSpecId(id);
+ } else {
+ hcc.redeployJobSpec(listener.getDeployedJobSpecId(), jobSpec);
+ }
listener.resume();
@@ -239,13 +260,11 @@
public static JobSpecification compileQueryJob(IStatementExecutor statementExecutor,
MetadataProvider metadataProvider, IHyracksClientConnection hcc, Query q) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- JobSpecification jobSpec = null;
+ JobSpecification jobSpec;
try {
jobSpec = statementExecutor.rewriteCompileQuery(hcc, metadataProvider, q, null);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- bActiveTxn = false;
} catch (Exception e) {
((QueryTranslator) statementExecutor).abort(e, e, mdTxnCtx);
throw e;
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
index 4ab7530..2f23a9c 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/BADStatementExecutor.java
@@ -185,9 +185,6 @@
public void handleCreateIndexStatement(MetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc, IRequestParameters requestParameters) throws Exception {
- //TODO: Check whether a delete or insert procedure using the index. If so, we will need to
- // disallow the procedure until after the newly distributed version is ready
-
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
//Allow channels to use the new index
@@ -243,13 +240,13 @@
for (Channel channel : usages.first) {
metadataProvider = new MetadataProvider(appCtx, activeDataverse);
BADJobService.redeployJobSpec(channel.getChannelId(), channel.getChannelBody(), metadataProvider, this, hcc,
- requestParameters);
+ requestParameters, false);
metadataProvider.getLocks().unlock();
}
for (Procedure procedure : usages.second) {
metadataProvider = new MetadataProvider(appCtx, activeDataverse);
BADJobService.redeployJobSpec(procedure.getEntityId(), procedure.getBody(), metadataProvider, this, hcc,
- requestParameters);
+ requestParameters, false);
metadataProvider.getLocks().unlock();
}
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
index 22767f2..204e8aa 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateChannelStatement.java
@@ -24,7 +24,6 @@
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
-import java.util.concurrent.ScheduledExecutorService;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -46,7 +45,6 @@
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.MetadataException;
import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.common.transactions.ITxnIdFactory;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.expression.CallExpr;
@@ -75,7 +73,6 @@
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.dataset.IHyracksDataset;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.job.DeployedJobSpecId;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.dataflow.common.data.parsers.IValueParser;
@@ -176,42 +173,44 @@
new Identifier(BADConstants.BAD_DATAVERSE_NAME), subscriptionsTypeName, null, null, null,
new HashMap<String, String>(), DatasetType.INTERNAL, idd, null, true);
- //Setup the results dataset
- partitionFields = new ArrayList<>();
- fieldNames = new ArrayList<>();
- fieldNames.add(BADConstants.ResultId);
- partitionFields.add(fieldNames);
- idd = new InternalDetailsDecl(partitionFields, keyIndicators, true, null);
- DatasetDecl createResultsDataset = new DatasetDecl(dataverseName, new Identifier(resultsTableName),
- new Identifier(BADConstants.BAD_DATAVERSE_NAME), resultsTypeName, null, null, null,
- new HashMap<String, String>(), DatasetType.INTERNAL, idd, null, true);
-
- //Create an index on timestamp for results
- CreateIndexStatement createTimeIndex = new CreateIndexStatement();
- createTimeIndex.setDatasetName(new Identifier(resultsTableName));
- createTimeIndex.setDataverseName(dataverseName);
- createTimeIndex.setIndexName(new Identifier(resultsTableName + "TimeIndex"));
- createTimeIndex.setIfNotExists(false);
- createTimeIndex.setIndexType(IndexType.BTREE);
- createTimeIndex.setEnforced(false);
- createTimeIndex.setGramLength(0);
- List<String> fNames = new ArrayList<>();
- fNames.add(BADConstants.ChannelExecutionTime);
- Pair<List<String>, IndexedTypeExpression> fields = new Pair<>(fNames, null);
- createTimeIndex.addFieldExprPair(fields);
- createTimeIndex.addFieldIndexIndicator(0);
-
-
- //Run both statements to create datasets
((QueryTranslator) statementExecutor).handleCreateDatasetStatement(metadataProvider, createSubscriptionsDataset,
hcc, null);
- metadataProvider.getLocks().reset();
- ((QueryTranslator) statementExecutor).handleCreateDatasetStatement(metadataProvider, createResultsDataset, hcc,
- null);
- metadataProvider.getLocks().reset();
- //Create a time index for the results
- ((QueryTranslator) statementExecutor).handleCreateIndexStatement(metadataProvider, createTimeIndex, hcc, null);
+ if (!push) {
+ //Setup the results dataset
+ partitionFields = new ArrayList<>();
+ fieldNames = new ArrayList<>();
+ fieldNames.add(BADConstants.ResultId);
+ partitionFields.add(fieldNames);
+ idd = new InternalDetailsDecl(partitionFields, keyIndicators, true, null);
+ DatasetDecl createResultsDataset = new DatasetDecl(dataverseName, new Identifier(resultsTableName),
+ new Identifier(BADConstants.BAD_DATAVERSE_NAME), resultsTypeName, null, null, null, new HashMap<>(),
+ DatasetType.INTERNAL, idd, null, true);
+
+ //Create an index on timestamp for results
+ CreateIndexStatement createTimeIndex = new CreateIndexStatement();
+ createTimeIndex.setDatasetName(new Identifier(resultsTableName));
+ createTimeIndex.setDataverseName(dataverseName);
+ createTimeIndex.setIndexName(new Identifier(resultsTableName + "TimeIndex"));
+ createTimeIndex.setIfNotExists(false);
+ createTimeIndex.setIndexType(IndexType.BTREE);
+ createTimeIndex.setEnforced(false);
+ createTimeIndex.setGramLength(0);
+ List<String> fNames = new ArrayList<>();
+ fNames.add(BADConstants.ChannelExecutionTime);
+ Pair<List<String>, IndexedTypeExpression> fields = new Pair<>(fNames, null);
+ createTimeIndex.addFieldExprPair(fields);
+ createTimeIndex.addFieldIndexIndicator(0);
+ metadataProvider.getLocks().reset();
+ ((QueryTranslator) statementExecutor).handleCreateDatasetStatement(metadataProvider, createResultsDataset,
+ hcc, null);
+ metadataProvider.getLocks().reset();
+
+ //Create a time index for the results
+ ((QueryTranslator) statementExecutor).handleCreateIndexStatement(metadataProvider, createTimeIndex, hcc,
+ null);
+
+ }
}
@@ -257,18 +256,6 @@
hcc, hdc, ResultDelivery.ASYNC, null, stats, true, null);
}
- private void setupExecutorJob(EntityId entityId, JobSpecification channeljobSpec, IHyracksClientConnection hcc,
- DeployedJobSpecEventListener listener, ITxnIdFactory txnIdFactory) throws Exception {
- if (channeljobSpec != null) {
- channeljobSpec.setProperty(ActiveNotificationHandler.ACTIVE_ENTITY_PROPERTY_NAME, entityId);
- DeployedJobSpecId destributedId = hcc.deployJobSpec(channeljobSpec);
- ScheduledExecutorService ses = BADJobService.startRepetitiveDeployedJobSpec(destributedId, hcc,
- BADJobService.findPeriod(duration), new HashMap<>(), entityId, txnIdFactory, listener);
- listener.storeDistributedInfo(destributedId, ses, null, null);
- }
-
- }
-
@Override
public void handle(IHyracksClientConnection hcc, IStatementExecutor statementExecutor,
IRequestParameters requestContext, MetadataProvider metadataProvider, int resultSetId)
@@ -283,7 +270,7 @@
dataverseName = new Identifier(((QueryTranslator) statementExecutor).getActiveDataverse(dataverseName));
dataverse = dataverseName.getValue();
subscriptionsTableName = channelName + BADConstants.subscriptionEnding;
- resultsTableName = channelName + BADConstants.resultsEnding;
+ resultsTableName = push ? "" : channelName + BADConstants.resultsEnding;
EntityId entityId = new EntityId(BADConstants.CHANNEL_EXTENSION_NAME, dataverse, channelName.getValue());
ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
@@ -291,7 +278,7 @@
(ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
DeployedJobSpecEventListener listener = (DeployedJobSpecEventListener) activeEventHandler.getListener(entityId);
boolean alreadyActive = false;
- Channel channel = null;
+ Channel channel;
MetadataTransactionContext mdTxnCtx = null;
try {
@@ -313,7 +300,7 @@
if (MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, subscriptionsTableName) != null) {
throw new AsterixException("The channel name:" + channelName + " is not available.");
}
- if (MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, resultsTableName) != null) {
+ if (!push && MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverse, resultsTableName) != null) {
throw new AsterixException("The channel name:" + channelName + " is not available.");
}
MetadataProvider tempMdProvider = new MetadataProvider(metadataProvider.getApplicationContext(),
@@ -330,12 +317,12 @@
// Now we subscribe
if (listener == null) {
listener = new DeployedJobSpecEventListener(appCtx, entityId,
- push ? PrecompiledType.PUSH_CHANNEL : PrecompiledType.CHANNEL, null,
- "BadListener");
+ push ? PrecompiledType.PUSH_CHANNEL : PrecompiledType.CHANNEL);
activeEventHandler.registerListener(listener);
}
- setupExecutorJob(entityId, channeljobSpec, hcc, listener, metadataProvider.getTxnIdFactory());
+ BADJobService.setupExecutorJob(entityId, channeljobSpec, hcc, listener, metadataProvider.getTxnIdFactory(),
+ duration);
channel = new Channel(dataverse, channelName.getValue(), subscriptionsTableName, resultsTableName, function,
duration, null, body);
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
index 03db7bc..be5bedb 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/CreateProcedureStatement.java
@@ -72,7 +72,6 @@
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.api.client.IHyracksClientConnection;
-import org.apache.hyracks.api.dataset.IHyracksDataset;
import org.apache.hyracks.api.dataset.ResultSetId;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.DeployedJobSpecId;
@@ -176,7 +175,7 @@
}
private Pair<JobSpecification, PrecompiledType> createProcedureJob(IStatementExecutor statementExecutor,
- MetadataProvider metadataProvider, IHyracksClientConnection hcc, IHyracksDataset hdc, Stats stats)
+ MetadataProvider metadataProvider, IHyracksClientConnection hcc, Stats stats)
throws Exception {
if (getProcedureBodyStatement().getKind() == Statement.Kind.INSERT) {
if (!varList.isEmpty()) {
@@ -188,7 +187,7 @@
insertStatement.getDatasetName().getValue()));
return new Pair<>(
((QueryTranslator) statementExecutor).handleInsertUpsertStatement(metadataProvider,
- getProcedureBodyStatement(), hcc, hdc, ResultDelivery.ASYNC, null, stats, true, null),
+ getProcedureBodyStatement(), hcc, null, ResultDelivery.ASYNC, null, stats, true, null),
PrecompiledType.INSERT);
} else if (getProcedureBodyStatement().getKind() == Statement.Kind.QUERY) {
SqlppRewriterFactory fact = new SqlppRewriterFactory();
@@ -218,11 +217,11 @@
}
private void setupDeployedJobSpec(EntityId entityId, JobSpecification jobSpec, IHyracksClientConnection hcc,
- DeployedJobSpecEventListener listener, ResultSetId resultSetId, IHyracksDataset hdc, Stats stats)
+ DeployedJobSpecEventListener listener, ResultSetId resultSetId, Stats stats)
throws Exception {
jobSpec.setProperty(ActiveNotificationHandler.ACTIVE_ENTITY_PROPERTY_NAME, entityId);
DeployedJobSpecId deployedJobSpecId = hcc.deployJobSpec(jobSpec);
- listener.storeDistributedInfo(deployedJobSpecId, null, hdc, resultSetId);
+ listener.setDeployedJobSpecId(deployedJobSpecId);
}
@Override
@@ -255,29 +254,25 @@
if (alreadyActive) {
throw new AsterixException("Procedure " + signature.getName() + " is already running");
}
- metadataProvider.setResultSetId(new ResultSetId(resultSetId++));
- final ResultDelivery resultDelivery = requestParameters.getResultProperties().getDelivery();
- final IHyracksDataset hdc = requestParameters.getHyracksDataset();
+ metadataProvider.setResultSetId(new ResultSetId(0));
final Stats stats = requestParameters.getStats();
- boolean resultsAsync = resultDelivery == ResultDelivery.ASYNC || resultDelivery == ResultDelivery.DEFERRED;
- metadataProvider.setResultAsyncMode(resultsAsync);
+ metadataProvider.setResultAsyncMode(false);
metadataProvider.setMaxResultReads(1);
//Create Procedure Internal Job
Pair<JobSpecification, PrecompiledType> procedureJobSpec =
- createProcedureJob(statementExecutor, metadataProvider, hcc, hdc, stats);
+ createProcedureJob(statementExecutor, metadataProvider, hcc, stats);
// Now we subscribe
if (listener == null) {
- listener = new DeployedJobSpecEventListener(appCtx, entityId, procedureJobSpec.second, null,
- "BadListener");
+ listener = new DeployedJobSpecEventListener(appCtx, entityId, procedureJobSpec.second);
activeEventHandler.registerListener(listener);
}
setupDeployedJobSpec(entityId, procedureJobSpec.first, hcc, listener, metadataProvider.getResultSetId(),
- hdc,
stats);
procedure = new Procedure(dataverse, signature.getName(), signature.getArity(), getParamList(),
- Function.RETURNTYPE_VOID, getProcedureBody(), Function.LANGUAGE_AQL, duration, dependencies);
+ procedureJobSpec.second.toString(), getProcedureBody(), Function.LANGUAGE_SQLPP, duration,
+ dependencies);
MetadataManager.INSTANCE.addEntity(mdTxnCtx, procedure);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ExecuteProcedureStatement.java b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ExecuteProcedureStatement.java
index 025b9e6..b794538 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ExecuteProcedureStatement.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/lang/statement/ExecuteProcedureStatement.java
@@ -118,7 +118,8 @@
Map<byte[], byte[]> contextRuntimeVarMap = createParameterMap(procedure);
DeployedJobSpecId deployedJobSpecId = listener.getDeployedJobSpecId();
if (procedure.getDuration().equals("")) {
- BADJobService.runDeployedJobSpec(deployedJobSpecId, hcc, contextRuntimeVarMap, entityId,
+ BADJobService.runDeployedJobSpec(deployedJobSpecId, hcc, requestParameters.getHyracksDataset(),
+ contextRuntimeVarMap, entityId,
metadataProvider.getTxnIdFactory(), appCtx, listener, (QueryTranslator) statementExecutor);
@@ -126,8 +127,7 @@
ScheduledExecutorService ses = BADJobService.startRepetitiveDeployedJobSpec(deployedJobSpecId, hcc,
BADJobService.findPeriod(procedure.getDuration()), contextRuntimeVarMap, entityId,
metadataProvider.getTxnIdFactory(), listener);
- listener.storeDistributedInfo(deployedJobSpecId, ses, listener.getResultDataset(),
- listener.getResultId());
+ listener.setExecutorService(ses);
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
txnActive = false;
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
index 1e5e627..a764a5a 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/BADMetadataRecordTypes.java
@@ -28,19 +28,19 @@
public class BADMetadataRecordTypes {
// -------------------------------------- Subscriptions --------------------------------------//
- private static final String[] subTypeFieldNames = { BADConstants.DataverseName, BADConstants.BrokerName,
- BADConstants.SubscriptionId };
+ private static final String[] subTypeFieldNames =
+ { BADConstants.DataverseName, BADConstants.BrokerName, BADConstants.SubscriptionId };
private static final IAType[] subTypeFieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AUUID };
- public static final ARecordType channelSubscriptionsType = new ARecordType(BADConstants.ChannelSubscriptionsType,
- subTypeFieldNames, subTypeFieldTypes, true);
+ public static final ARecordType channelSubscriptionsType =
+ new ARecordType(BADConstants.ChannelSubscriptionsType, subTypeFieldNames, subTypeFieldTypes, true);
// ---------------------------------------- Results --------------------------------------------//
private static final String[] resultTypeFieldNames = { BADConstants.ResultId, BADConstants.ChannelExecutionTime,
BADConstants.SubscriptionId, BADConstants.DeliveryTime };
- private static final IAType[] resultTypeFieldTypes = { BuiltinType.AUUID, BuiltinType.ADATETIME, BuiltinType.AUUID,
- BuiltinType.ADATETIME };
- public static final ARecordType channelResultsType = new ARecordType(BADConstants.ChannelResultsType,
- resultTypeFieldNames, resultTypeFieldTypes, true);
+ private static final IAType[] resultTypeFieldTypes =
+ { BuiltinType.AUUID, BuiltinType.ADATETIME, BuiltinType.AUUID, BuiltinType.ADATETIME };
+ public static final ARecordType channelResultsType =
+ new ARecordType(BADConstants.ChannelResultsType, resultTypeFieldNames, resultTypeFieldTypes, true);
//------------------------------------------ Channel ----------------------------------------//
public static final int CHANNEL_ARECORD_DATAVERSE_NAME_FIELD_INDEX = 0;
@@ -86,7 +86,7 @@
public static final int PROCEDURE_ARECORD_PROCEDURE_NAME_FIELD_INDEX = 1;
public static final int PROCEDURE_ARECORD_PROCEDURE_ARITY_FIELD_INDEX = 2;
public static final int PROCEDURE_ARECORD_PROCEDURE_PARAM_LIST_FIELD_INDEX = 3;
- public static final int PROCEDURE_ARECORD_PROCEDURE_RETURN_TYPE_FIELD_INDEX = 4;
+ public static final int PROCEDURE_ARECORD_PROCEDURE_TYPE_FIELD_INDEX = 4;
public static final int PROCEDURE_ARECORD_PROCEDURE_DEFINITION_FIELD_INDEX = 5;
public static final int PROCEDURE_ARECORD_PROCEDURE_LANGUAGE_FIELD_INDEX = 6;
public static final int PROCEDURE_ARECORD_PROCEDURE_DURATION_FIELD_INDEX = 7;
@@ -96,9 +96,8 @@
BADConstants.RECORD_TYPENAME_PROCEDURE,
// FieldNames
new String[] { BADConstants.DataverseName, BADConstants.ProcedureName, BADConstants.FIELD_NAME_ARITY,
- BADConstants.FIELD_NAME_PARAMS, BADConstants.FIELD_NAME_RETURN_TYPE,
- BADConstants.FIELD_NAME_DEFINITION, BADConstants.FIELD_NAME_LANGUAGE, BADConstants.Duration,
- BADConstants.FIELD_NAME_DEPENDENCIES },
+ BADConstants.FIELD_NAME_PARAMS, BADConstants.FIELD_NAME_TYPE, BADConstants.FIELD_NAME_DEFINITION,
+ BADConstants.FIELD_NAME_LANGUAGE, BADConstants.Duration, BADConstants.FIELD_NAME_DEPENDENCIES },
// FieldTypes
new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
new AOrderedListType(BuiltinType.ASTRING, null), BuiltinType.ASTRING, BuiltinType.ASTRING,
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
index 78f7c95..4598d71 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/DeployedJobSpecEventListener.java
@@ -30,9 +30,6 @@
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.common.exceptions.RuntimeDataException;
import org.apache.asterix.common.metadata.IDataset;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
-import org.apache.hyracks.api.dataset.IHyracksDataset;
-import org.apache.hyracks.api.dataset.ResultSetId;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.job.DeployedJobSpecId;
import org.apache.log4j.Logger;
@@ -53,9 +50,6 @@
private ScheduledExecutorService executorService = null;
private final PrecompiledType type;
- private IHyracksDataset hdc;
- private ResultSetId resultSetId;
-
// members
protected volatile ActivityState state;
protected final ICcApplicationContext appCtx;
@@ -63,31 +57,18 @@
protected final ActiveEvent statsUpdatedEvent;
protected long statsTimestamp;
protected String stats;
- protected final String runtimeName;
- protected final AlgebricksAbsolutePartitionConstraint locations;
private int runningInstance;
- public DeployedJobSpecEventListener(ICcApplicationContext appCtx, EntityId entityId, PrecompiledType type,
- AlgebricksAbsolutePartitionConstraint locations, String runtimeName) {
+ public DeployedJobSpecEventListener(ICcApplicationContext appCtx, EntityId entityId, PrecompiledType type) {
this.appCtx = appCtx;
this.entityId = entityId;
setState(ActivityState.STOPPED);
this.statsTimestamp = -1;
this.statsUpdatedEvent = new ActiveEvent(null, Kind.STATS_UPDATED, entityId, null);
this.stats = "{\"Stats\":\"N/A\"}";
- this.runtimeName = runtimeName;
- this.locations = locations;
this.type = type;
}
- public IHyracksDataset getResultDataset() {
- return hdc;
- }
-
- public ResultSetId getResultId() {
- return resultSetId;
- }
-
public DeployedJobSpecId getDeployedJobSpecId() {
return deployedJobSpecId;
}
@@ -121,14 +102,15 @@
return type;
}
- public void storeDistributedInfo(DeployedJobSpecId deployedJobSpecId, ScheduledExecutorService ses,
- IHyracksDataset hdc, ResultSetId resultSetId) {
+ public void setDeployedJobSpecId(DeployedJobSpecId deployedJobSpecId) {
this.deployedJobSpecId = deployedJobSpecId;
- this.executorService = ses;
- this.hdc = hdc;
- this.resultSetId = resultSetId;
}
+ public void setExecutorService(ScheduledExecutorService ses) {
+ this.executorService = ses;
+ }
+
+
public ScheduledExecutorService getExecutorService() {
return executorService;
}
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
index 50d506b..dff4577 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/Procedure.java
@@ -34,7 +34,7 @@
private final int arity;
private final List<String> params;
private final String body;
- private final String returnType;
+ private final String type;
private final String language;
private final String duration;
/*
@@ -46,12 +46,12 @@
*/
private final List<List<List<String>>> dependencies;
- public Procedure(String dataverseName, String functionName, int arity, List<String> params, String returnType,
+ public Procedure(String dataverseName, String functionName, int arity, List<String> params, String type,
String functionBody, String language, String duration, List<List<List<String>>> dependencies) {
this.procedureId = new EntityId(BADConstants.PROCEDURE_KEYWORD, dataverseName, functionName);
this.params = params;
this.body = functionBody;
- this.returnType = returnType == null ? RETURNTYPE_VOID : returnType;
+ this.type = type;
this.language = language;
this.arity = arity;
this.duration = duration;
@@ -76,8 +76,8 @@
return body;
}
- public String getReturnType() {
- return returnType;
+ public String getType() {
+ return type;
}
public String getLanguage() {
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
index 0a6acb9..a0e6657 100644
--- a/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/metadata/ProcedureTupleTranslator.java
@@ -96,7 +96,7 @@
}
String returnType = ((AString) procedureRecord
- .getValueByPos(BADMetadataRecordTypes.PROCEDURE_ARECORD_PROCEDURE_RETURN_TYPE_FIELD_INDEX))
+ .getValueByPos(BADMetadataRecordTypes.PROCEDURE_ARECORD_PROCEDURE_TYPE_FIELD_INDEX))
.getStringValue();
String definition = ((AString) procedureRecord
@@ -194,9 +194,9 @@
// write field 4
fieldValue.reset();
- aString.setValue(procedure.getReturnType());
+ aString.setValue(procedure.getType());
stringSerde.serialize(aString, fieldValue.getDataOutput());
- recordBuilder.addField(BADMetadataRecordTypes.PROCEDURE_ARECORD_PROCEDURE_RETURN_TYPE_FIELD_INDEX, fieldValue);
+ recordBuilder.addField(BADMetadataRecordTypes.PROCEDURE_ARECORD_PROCEDURE_TYPE_FIELD_INDEX, fieldValue);
// write field 5
fieldValue.reset();
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/recovery/BADGlobalRecoveryManager.java b/asterix-bad/src/main/java/org/apache/asterix/bad/recovery/BADGlobalRecoveryManager.java
new file mode 100644
index 0000000..d34d170
--- /dev/null
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/recovery/BADGlobalRecoveryManager.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.bad.recovery;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.active.EntityId;
+import org.apache.asterix.active.IActiveEntityEventsListener;
+import org.apache.asterix.app.active.ActiveNotificationHandler;
+import org.apache.asterix.app.result.ResultReader;
+import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
+import org.apache.asterix.app.translator.RequestParameters;
+import org.apache.asterix.bad.BADJobService;
+import org.apache.asterix.bad.lang.BADCompilationProvider;
+import org.apache.asterix.bad.lang.BADLangExtension;
+import org.apache.asterix.bad.lang.BADStatementExecutor;
+import org.apache.asterix.bad.metadata.Channel;
+import org.apache.asterix.bad.metadata.DeployedJobSpecEventListener;
+import org.apache.asterix.bad.metadata.DeployedJobSpecEventListener.PrecompiledType;
+import org.apache.asterix.bad.metadata.Procedure;
+import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.hyracks.bootstrap.GlobalRecoveryManager;
+import org.apache.asterix.metadata.MetadataManager;
+import org.apache.asterix.metadata.MetadataTransactionContext;
+import org.apache.asterix.metadata.bootstrap.MetadataBuiltinEntities;
+import org.apache.asterix.metadata.declared.MetadataProvider;
+import org.apache.asterix.translator.IStatementExecutor;
+import org.apache.asterix.translator.ResultProperties;
+import org.apache.asterix.translator.SessionConfig;
+import org.apache.asterix.translator.SessionOutput;
+import org.apache.hyracks.algebricks.runtime.serializer.ResultSerializerFactoryProvider;
+import org.apache.hyracks.algebricks.runtime.writers.PrinterBasedWriterFactory;
+import org.apache.hyracks.api.application.ICCServiceContext;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.client.dataset.HyracksDataset;
+import org.apache.hyracks.control.common.utils.HyracksThreadFactory;
+
+public class BADGlobalRecoveryManager extends GlobalRecoveryManager {
+
+ private static final Logger LOGGER = Logger.getLogger(BADGlobalRecoveryManager.class.getName());
+
+ public BADGlobalRecoveryManager(ICCServiceContext serviceCtx, IHyracksClientConnection hcc,
+ IStorageComponentProvider componentProvider) {
+ super(serviceCtx, hcc, componentProvider);
+ }
+
+ @Override
+ protected void recover(ICcApplicationContext appCtx) throws HyracksDataException {
+ try {
+ LOGGER.info("Starting Global Recovery");
+ MetadataManager.INSTANCE.init();
+ MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ mdTxnCtx = doRecovery(appCtx, mdTxnCtx);
+ List<Channel> channels = BADLangExtension.getAllChannels(mdTxnCtx);
+ List<Procedure> procedures = BADLangExtension.getAllProcedures(mdTxnCtx);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ deployJobs(appCtx, channels, procedures);
+ recoveryCompleted = true;
+ recovering = false;
+ LOGGER.info("Global Recovery Completed. Refreshing cluster state...");
+ appCtx.getClusterStateManager().refreshState();
+ } catch (Exception e) {
+ throw HyracksDataException.create(e);
+ }
+ }
+
+ private void deployJobs(ICcApplicationContext appCtx, List<Channel> channels, List<Procedure> procedures)
+ throws Exception {
+ SessionConfig sessionConfig =
+ new SessionConfig(SessionConfig.OutputFormat.ADM, true, true, true, SessionConfig.PlanFormat.STRING);
+
+ BADStatementExecutor badStatementExecutor = new BADStatementExecutor(appCtx, new ArrayList<>(),
+ new SessionOutput(sessionConfig, null), new BADCompilationProvider(), Executors.newSingleThreadExecutor(
+ new HyracksThreadFactory(DefaultStatementExecutorFactory.class.getSimpleName())));
+
+ ActiveNotificationHandler activeEventHandler =
+ (ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
+
+ //Unregister any DeployedJobSpecEventListeners left over from before the failure, shutting down their executors first
+ for (IActiveEntityEventsListener listener : activeEventHandler.getEventListeners()) {
+ if (listener instanceof DeployedJobSpecEventListener) {
+ if (((DeployedJobSpecEventListener) listener).getExecutorService() != null) {
+ ((DeployedJobSpecEventListener) listener).getExecutorService().shutdown();
+ }
+ activeEventHandler.unregisterListener(listener);
+ }
+ }
+
+ MetadataProvider metadataProvider;
+
+ //Redeploy the precompiled channel and procedure job specifications
+ for (Channel channel : channels) {
+ EntityId entityId = channel.getChannelId();
+ metadataProvider = new MetadataProvider(appCtx, MetadataBuiltinEntities.DEFAULT_DATAVERSE);
+ DeployedJobSpecEventListener listener = new DeployedJobSpecEventListener(appCtx, entityId,
+ channel.getResultsDatasetName().equals("") ? PrecompiledType.PUSH_CHANNEL
+ : PrecompiledType.CHANNEL);
+ listener.suspend();
+ activeEventHandler.registerListener(listener);
+ BADJobService.redeployJobSpec(entityId, channel.getChannelBody(), metadataProvider, badStatementExecutor,
+ hcc, new RequestParameters(null, null, null, null, null, null), true);
+
+ ScheduledExecutorService ses = BADJobService.startRepetitiveDeployedJobSpec(listener.getDeployedJobSpecId(),
+ hcc,
+ BADJobService.findPeriod(channel.getDuration()), new HashMap<>(), entityId,
+ metadataProvider.getTxnIdFactory(), listener);
+ listener.setExecutorService(ses);
+ metadataProvider.getLocks().unlock();
+
+ LOGGER.log(Level.SEVERE, entityId.getExtensionName() + " " + entityId.getDataverse() + "."
+ + entityId.getEntityName() + " was stopped by cluster failure. It has restarted.");
+
+ }
+ for (Procedure procedure : procedures) {
+ EntityId entityId = procedure.getEntityId();
+ metadataProvider = new MetadataProvider(appCtx, MetadataBuiltinEntities.DEFAULT_DATAVERSE);
+ metadataProvider.setWriterFactory(PrinterBasedWriterFactory.INSTANCE);
+ metadataProvider.setResultSerializerFactoryProvider(ResultSerializerFactoryProvider.INSTANCE);
+ DeployedJobSpecEventListener listener =
+ new DeployedJobSpecEventListener(appCtx, entityId, PrecompiledType.valueOf(procedure.getType()));
+ listener.suspend();
+ activeEventHandler.registerListener(listener);
+ BADJobService.redeployJobSpec(entityId, procedure.getBody(), metadataProvider, badStatementExecutor, hcc,
+ new RequestParameters(
+ new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(),
+ ResultReader.NUM_READERS),
+ new ResultProperties(IStatementExecutor.ResultDelivery.IMMEDIATE),
+ new IStatementExecutor.Stats(), null, null, null),
+ true);
+ metadataProvider.getLocks().unlock();
+ //Log that the procedure was stopped by the cluster restart. The procedure is available to run again now.
+ LOGGER.log(Level.SEVERE, entityId.getExtensionName() + " " + entityId.getDataverse() + "."
+ + entityId.getEntityName()
+ + " was lost with cluster failure and any repetitive instances have stopped. It is now available to run again.");
+ //TODO: allow repetitive procedures to restart execution automatically
+ //Issue: need to store in metadata the information for running instances
+ }
+ }
+}
diff --git a/asterix-bad/src/main/java/org/apache/asterix/bad/recovery/BADRecoveryExtension.java b/asterix-bad/src/main/java/org/apache/asterix/bad/recovery/BADRecoveryExtension.java
new file mode 100644
index 0000000..609b6de
--- /dev/null
+++ b/asterix-bad/src/main/java/org/apache/asterix/bad/recovery/BADRecoveryExtension.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.bad.recovery;
+
+import java.util.List;
+
+import org.apache.asterix.app.cc.IGlobalRecoveryExtension;
+import org.apache.asterix.common.api.ExtensionId;
+import org.apache.asterix.common.cluster.IGlobalRecoveryManager;
+import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.application.ICCServiceContext;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+
+public class BADRecoveryExtension implements IGlobalRecoveryExtension {
+
+ public static final ExtensionId BAD_RECOVERY_EXTENSION_ID =
+ new ExtensionId(BADRecoveryExtension.class.getSimpleName(), 0);
+
+ @Override
+ public ExtensionId getId() {
+ return BAD_RECOVERY_EXTENSION_ID;
+ }
+
+ @Override
+ public void configure(List<Pair<String, String>> args) {
+ }
+
+ @Override
+ public IGlobalRecoveryManager getGlobalRecoveryManager(ICCServiceContext serviceCtx, IHyracksClientConnection hcc,
+ IStorageComponentProvider componentProvider) {
+ return new BADGlobalRecoveryManager(serviceCtx, hcc, componentProvider);
+ }
+}
diff --git a/asterix-bad/src/main/resources/cc.conf b/asterix-bad/src/main/resources/cc.conf
index 1153dcc..371cbe8 100644
--- a/asterix-bad/src/main/resources/cc.conf
+++ b/asterix-bad/src/main/resources/cc.conf
@@ -59,3 +59,5 @@
enabled = true
[extension/org.apache.asterix.bad.metadata.BADMetadataExtension]
enabled = true
+[extension/org.apache.asterix.bad.recovery.BADRecoveryExtension]
+enabled = true
\ No newline at end of file