Committing the following: 1) integration tests for managix, 2) support for configurable parameters in Asterix, 3) refactoring of test cases (removing code duplication).
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
index d8477f1..718c0f1 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/algebra/base/LogicalOperatorDeepCopyVisitor.java
@@ -20,7 +20,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -296,11 +295,6 @@
}
@Override
- public ILogicalOperator visitDieOperator(DieOperator op, ILogicalOperator arg) {
- throw new UnsupportedOperationException();
- }
-
- @Override
public ILogicalOperator visitNestedTupleSourceOperator(NestedTupleSourceOperator op, ILogicalOperator arg)
throws AlgebricksException {
NestedTupleSourceOperator opCopy = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(arg));
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
index 3f434b7..f14fff8 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
@@ -31,6 +31,7 @@
import edu.uci.ics.asterix.optimizer.rules.IntroduceDynamicTypeCastRule;
import edu.uci.ics.asterix.optimizer.rules.IntroduceEnforcedListTypeRule;
import edu.uci.ics.asterix.optimizer.rules.IntroduceInstantLockSearchCallbackRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceRapidFrameFlushProjectRule;
import edu.uci.ics.asterix.optimizer.rules.IntroduceSecondaryIndexInsertDeleteRule;
import edu.uci.ics.asterix.optimizer.rules.IntroduceStaticTypeCastRule;
import edu.uci.ics.asterix.optimizer.rules.LoadRecordFieldsRule;
@@ -80,7 +81,6 @@
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PullSelectOutOfEqJoin;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushAssignBelowUnionAllRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushAssignDownThroughProductRule;
-import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushDieUpRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushLimitDownRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushNestedOrderByUnderPreSortedGroupByRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushProjectDownRule;
@@ -134,7 +134,6 @@
List<IAlgebraicRewriteRule> condPushDownAndJoinInference = new LinkedList<IAlgebraicRewriteRule>();
condPushDownAndJoinInference.add(new PushSelectDownRule());
- condPushDownAndJoinInference.add(new PushDieUpRule());
condPushDownAndJoinInference.add(new RemoveRedundantListifyRule());
condPushDownAndJoinInference.add(new SimpleUnnestToProductRule());
condPushDownAndJoinInference.add(new ComplexUnnestToProductRule());
@@ -253,6 +252,7 @@
physicalRewritesTopLevel.add(new PushLimitDownRule());
physicalRewritesTopLevel.add(new IntroduceProjectsRule());
physicalRewritesTopLevel.add(new SetAlgebricksPhysicalOperatorsRule());
+ physicalRewritesTopLevel.add(new IntroduceRapidFrameFlushProjectRule());
physicalRewritesTopLevel.add(new SetExecutionModeRule());
return physicalRewritesTopLevel;
}
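For context on how the rules registered above fire: an Algebricks rewrite rule exposes a pre-visit and a post-visit hook, and the heuristic rewriter keeps applying each rule in a collection until none of them reports a change. Below is a minimal sketch of that contract; the fixpoint driver is illustrative only and far simpler than the real Algebricks rewriter, while the rule, operator, and context types are the ones imported in this file.

// Illustrative sketch of the Algebricks rewrite contract. The fixpoint
// driver below is hypothetical and much simpler than the real heuristic
// rewriter; only the rule/operator/context types come from Algebricks.
import java.util.List;

import org.apache.commons.lang3.mutable.Mutable;

import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;

public class RuleCollectionDriverSketch {
    // Applies one rule collection at a plan root until no rule fires.
    public static void applyToFixpoint(List<IAlgebraicRewriteRule> rules,
            Mutable<ILogicalOperator> planRoot, IOptimizationContext ctx) throws AlgebricksException {
        boolean changed;
        do {
            changed = false;
            for (IAlgebraicRewriteRule rule : rules) {
                // A rule returns true only when it modified the plan;
                // rewritePre runs top-down, rewritePost bottom-up.
                changed |= rule.rewritePre(planRoot, ctx);
                changed |= rule.rewritePost(planRoot, ctx);
            }
        } while (changed);
    }
}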
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectRule.java
new file mode 100644
index 0000000..d3e11ed2
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectRule.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.optimizer.rules;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.algebra.operators.CommitOperator;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ExtensionOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StreamProjectPOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule will search for project operators in an insert/delete/update plan and
+ * pass a hint to all those projects between the first "insert" and the commit
+ * operator. This hint is used by the project operator so that frames are pushed to
+ * the next operator without waiting until they get full. The purpose of this is to
+ * reduce the time of holding exclusive locks on the keys that have been inserted.
+ *
+ * @author salsubaiee
+ */
+public class IntroduceRapidFrameFlushProjectRule implements IAlgebraicRewriteRule {
+
+ @Override
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ return false;
+ }
+
+ private boolean checkIfRuleIsApplicable(AbstractLogicalOperator op) {
+ if (op.getOperatorTag() != LogicalOperatorTag.EXTENSION_OPERATOR) {
+ return false;
+ }
+ ExtensionOperator extensionOp = (ExtensionOperator) op;
+ if (!(extensionOp.getDelegate() instanceof CommitOperator)) {
+ return false;
+ }
+
+ AbstractLogicalOperator descendantOp = op;
+ while (descendantOp != null) {
+ if (descendantOp.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+ if (descendantOp.getPhysicalOperator() == null) {
+ return false;
+ }
+ } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE) {
+ break;
+ }
+ descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
+ }
+ return true;
+ }
+
+ @Override
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
+
+ AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
+
+ if (!checkIfRuleIsApplicable(op)) {
+ return false;
+ }
+ AbstractLogicalOperator descendantOp = op;
+ ProjectOperator projectOp = null;
+
+ boolean planModified = false;
+ while (descendantOp != null) {
+ if (descendantOp.getOperatorTag() == LogicalOperatorTag.PROJECT) {
+ projectOp = (ProjectOperator) descendantOp;
+ StreamProjectPOperator physicalOp = (StreamProjectPOperator) projectOp.getPhysicalOperator();
+ physicalOp.setRapidFrameFlush(true);
+ planModified = true;
+ } else if (descendantOp.getOperatorTag() == LogicalOperatorTag.INSERT_DELETE) {
+ break;
+ }
+ descendantOp = (AbstractLogicalOperator) descendantOp.getInputs().get(0).getValue();
+ }
+ return planModified;
+ }
+}
\ No newline at end of file
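To make the effect of StreamProjectPOperator.setRapidFrameFlush(true) concrete: a stream project normally appends projected tuples into an output frame and forwards the frame only once it fills up, while the hint makes it forward after every append, so downstream operators (ultimately the commit) see tuples sooner and the exclusive locks are released earlier. The sketch below is hypothetical; FrameAppender and FrameWriter stand in for the actual Hyracks frame machinery.

// Hypothetical sketch of what the rapid-frame-flush hint changes in a
// push-style project runtime. FrameAppender/FrameWriter are stand-ins
// for the Hyracks frame classes, not the real API.
interface FrameWriter {
    void push(byte[] frame);
}

class FrameAppender {
    private final byte[] frame = new byte[32768];
    private int used;

    boolean append(byte[] tuple) { // false when the frame is full
        if (used + tuple.length > frame.length) {
            return false;
        }
        System.arraycopy(tuple, 0, frame, used, tuple.length);
        used += tuple.length;
        return true;
    }

    void flushTo(FrameWriter writer) {
        writer.push(frame);
        used = 0;
    }
}

class ProjectRuntimeSketch {
    private final FrameAppender appender = new FrameAppender();
    private boolean rapidFrameFlush; // set by IntroduceRapidFrameFlushProjectRule

    void nextTuple(byte[] projected, FrameWriter downstream) {
        if (!appender.append(projected)) {
            appender.flushTo(downstream); // normal path: ship only full frames
            appender.append(projected);   // assumes a tuple fits in an empty frame
        }
        if (rapidFrameFlush) {
            // hint path: ship immediately so the commit operator sees the
            // tuple (and can release its locks) without waiting for a full frame
            appender.flushTo(downstream);
        }
    }
}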
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
index 0f01466..eed0f80 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlExpressionToPlanTranslator.java
@@ -22,7 +22,6 @@
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -124,7 +123,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -918,15 +916,6 @@
}
@Override
- public Pair<ILogicalOperator, LogicalVariable> visitDieClause(DieClause lc, Mutable<ILogicalOperator> tupSource)
- throws AsterixException {
- Pair<ILogicalExpression, Mutable<ILogicalOperator>> p1 = aqlExprToAlgExpression(lc.getDieExpr(), tupSource);
- DieOperator opDie = new DieOperator(p1.first);
- opDie.getInputs().add(p1.second);
- return new Pair<ILogicalOperator, LogicalVariable>(opDie, null);
- }
-
- @Override
public Pair<ILogicalOperator, LogicalVariable> visitDistinctClause(DistinctClause dc,
Mutable<ILogicalOperator> tupSource) throws AsterixException {
List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
index 3440ce8..9f28113 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AqlPlusExpressionToPlanTranslator.java
@@ -22,7 +22,6 @@
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -123,7 +122,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
@@ -192,7 +190,7 @@
}
private final JobId jobId;
- private TranslationContext context;
+ private TranslationContext context;
private String outputDatasetName;
private ICompiledDmlStatement stmt;
private AqlMetadataProvider metadataProvider;
@@ -201,8 +199,6 @@
private MetaScopeILogicalOperator metaScopeOp = new MetaScopeILogicalOperator();
private static LogicalVariable METADATA_DUMMY_VAR = new LogicalVariable(-1);
-
-
public AqlPlusExpressionToPlanTranslator(JobId jobId, AqlMetadataProvider metadataProvider,
Counter currentVarCounter, String outputDatasetName, ICompiledDmlStatement stmt) {
this.jobId = jobId;
@@ -221,8 +217,8 @@
return translate(expr, null);
}
- public ILogicalPlan translate(Query expr, AqlMetadataProvider metadata)
- throws AlgebricksException, AsterixException {
+ public ILogicalPlan translate(Query expr, AqlMetadataProvider metadata) throws AlgebricksException,
+ AsterixException {
IDataFormat format = metadata.getFormat();
if (format == null) {
throw new AlgebricksException("Data format has not been set.");
@@ -877,15 +873,6 @@
}
@Override
- public Pair<ILogicalOperator, LogicalVariable> visitDieClause(DieClause lc, Mutable<ILogicalOperator> tupSource)
- throws AsterixException {
- Pair<ILogicalExpression, Mutable<ILogicalOperator>> p1 = aqlExprToAlgExpression(lc.getDieExpr(), tupSource);
- DieOperator opDie = new DieOperator(p1.first);
- opDie.getInputs().add(p1.second);
- return new Pair<ILogicalOperator, LogicalVariable>(opDie, null);
- }
-
- @Override
public Pair<ILogicalOperator, LogicalVariable> visitDistinctClause(DistinctClause dc,
Mutable<ILogicalOperator> tupSource) throws AsterixException {
List<Mutable<ILogicalExpression>> exprList = new ArrayList<Mutable<ILogicalExpression>>();
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
index 625fed1..6cf8d28 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
@@ -49,527 +49,490 @@
*/
public class CompiledStatements {
- public static interface ICompiledStatement {
+ public static interface ICompiledStatement {
- public Kind getKind();
- }
+ public Kind getKind();
+ }
- public static class CompiledWriteFromQueryResultStatement implements
- ICompiledDmlStatement {
+ public static class CompiledWriteFromQueryResultStatement implements ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private Query query;
- private int varCounter;
+ private String dataverseName;
+ private String datasetName;
+ private Query query;
+ private int varCounter;
- public CompiledWriteFromQueryResultStatement(String dataverseName,
- String datasetName, Query query, int varCounter) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.query = query;
- this.varCounter = varCounter;
- }
+ public CompiledWriteFromQueryResultStatement(String dataverseName, String datasetName, Query query,
+ int varCounter) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.query = query;
+ this.varCounter = varCounter;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Query getQuery() {
- return query;
- }
+ public Query getQuery() {
+ return query;
+ }
- @Override
- public Kind getKind() {
- return Kind.WRITE_FROM_QUERY_RESULT;
- }
+ @Override
+ public Kind getKind() {
+ return Kind.WRITE_FROM_QUERY_RESULT;
+ }
- }
+ }
- public static class CompiledDatasetDropStatement implements
- ICompiledStatement {
- private final String dataverseName;
- private final String datasetName;
+ public static class CompiledDatasetDropStatement implements ICompiledStatement {
+ private final String dataverseName;
+ private final String datasetName;
- public CompiledDatasetDropStatement(String dataverseName,
- String datasetName) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- }
+ public CompiledDatasetDropStatement(String dataverseName, String datasetName) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- @Override
- public Kind getKind() {
- return Kind.DATASET_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.DATASET_DROP;
+ }
+ }
- // added by yasser
- public static class CompiledCreateDataverseStatement implements
- ICompiledStatement {
- private String dataverseName;
- private String format;
+ // added by yasser
+ public static class CompiledCreateDataverseStatement implements ICompiledStatement {
+ private String dataverseName;
+ private String format;
- public CompiledCreateDataverseStatement(String dataverseName,
- String format) {
- this.dataverseName = dataverseName;
- this.format = format;
- }
+ public CompiledCreateDataverseStatement(String dataverseName, String format) {
+ this.dataverseName = dataverseName;
+ this.format = format;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getFormat() {
- return format;
- }
+ public String getFormat() {
+ return format;
+ }
- @Override
- public Kind getKind() {
- return Kind.CREATE_DATAVERSE;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.CREATE_DATAVERSE;
+ }
+ }
- public static class CompiledNodeGroupDropStatement implements
- ICompiledStatement {
- private String nodeGroupName;
+ public static class CompiledNodeGroupDropStatement implements ICompiledStatement {
+ private String nodeGroupName;
- public CompiledNodeGroupDropStatement(String nodeGroupName) {
- this.nodeGroupName = nodeGroupName;
- }
+ public CompiledNodeGroupDropStatement(String nodeGroupName) {
+ this.nodeGroupName = nodeGroupName;
+ }
- public String getNodeGroupName() {
- return nodeGroupName;
- }
+ public String getNodeGroupName() {
+ return nodeGroupName;
+ }
- @Override
- public Kind getKind() {
- return Kind.NODEGROUP_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.NODEGROUP_DROP;
+ }
+ }
- public static class CompiledIndexDropStatement implements
- ICompiledStatement {
- private String dataverseName;
- private String datasetName;
- private String indexName;
+ public static class CompiledIndexDropStatement implements ICompiledStatement {
+ private String dataverseName;
+ private String datasetName;
+ private String indexName;
- public CompiledIndexDropStatement(String dataverseName,
- String datasetName, String indexName) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.indexName = indexName;
- }
+ public CompiledIndexDropStatement(String dataverseName, String datasetName, String indexName) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.indexName = indexName;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public String getIndexName() {
- return indexName;
- }
+ public String getIndexName() {
+ return indexName;
+ }
- @Override
- public Kind getKind() {
- return Kind.INDEX_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.INDEX_DROP;
+ }
+ }
- public static class CompiledDataverseDropStatement implements
- ICompiledStatement {
- private String dataverseName;
- private boolean ifExists;
+ public static class CompiledDataverseDropStatement implements ICompiledStatement {
+ private String dataverseName;
+ private boolean ifExists;
- public CompiledDataverseDropStatement(String dataverseName,
- boolean ifExists) {
- this.dataverseName = dataverseName;
- this.ifExists = ifExists;
- }
+ public CompiledDataverseDropStatement(String dataverseName, boolean ifExists) {
+ this.dataverseName = dataverseName;
+ this.ifExists = ifExists;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public boolean getIfExists() {
- return ifExists;
- }
+ public boolean getIfExists() {
+ return ifExists;
+ }
- @Override
- public Kind getKind() {
- return Kind.DATAVERSE_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.DATAVERSE_DROP;
+ }
+ }
- public static class CompiledTypeDropStatement implements ICompiledStatement {
- private String typeName;
+ public static class CompiledTypeDropStatement implements ICompiledStatement {
+ private String typeName;
- public CompiledTypeDropStatement(String nodeGroupName) {
- this.typeName = nodeGroupName;
- }
+ public CompiledTypeDropStatement(String nodeGroupName) {
+ this.typeName = nodeGroupName;
+ }
- public String getTypeName() {
- return typeName;
- }
+ public String getTypeName() {
+ return typeName;
+ }
- @Override
- public Kind getKind() {
- return Kind.TYPE_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.TYPE_DROP;
+ }
+ }
- public static interface ICompiledDmlStatement extends ICompiledStatement {
+ public static interface ICompiledDmlStatement extends ICompiledStatement {
- public String getDataverseName();
+ public String getDataverseName();
- public String getDatasetName();
- }
+ public String getDatasetName();
+ }
- public static class CompiledCreateIndexStatement implements
- ICompiledDmlStatement {
- private final String indexName;
- private final String dataverseName;
- private final String datasetName;
- private final List<String> keyFields;
- private final IndexType indexType;
+ public static class CompiledCreateIndexStatement implements ICompiledDmlStatement {
+ private final String indexName;
+ private final String dataverseName;
+ private final String datasetName;
+ private final List<String> keyFields;
+ private final IndexType indexType;
- // Specific to NGram index.
- private final int gramLength;
+ // Specific to NGram index.
+ private final int gramLength;
- public CompiledCreateIndexStatement(String indexName,
- String dataverseName, String datasetName,
- List<String> keyFields, int gramLength, IndexType indexType) {
- this.indexName = indexName;
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.keyFields = keyFields;
- this.gramLength = gramLength;
- this.indexType = indexType;
- }
+ public CompiledCreateIndexStatement(String indexName, String dataverseName, String datasetName,
+ List<String> keyFields, int gramLength, IndexType indexType) {
+ this.indexName = indexName;
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.keyFields = keyFields;
+ this.gramLength = gramLength;
+ this.indexType = indexType;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getIndexName() {
- return indexName;
- }
+ public String getIndexName() {
+ return indexName;
+ }
- public List<String> getKeyFields() {
- return keyFields;
- }
+ public List<String> getKeyFields() {
+ return keyFields;
+ }
- public IndexType getIndexType() {
- return indexType;
- }
+ public IndexType getIndexType() {
+ return indexType;
+ }
- public int getGramLength() {
- return gramLength;
- }
+ public int getGramLength() {
+ return gramLength;
+ }
- @Override
- public Kind getKind() {
- return Kind.CREATE_INDEX;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.CREATE_INDEX;
+ }
+ }
- public static class CompiledLoadFromFileStatement implements
- ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private boolean alreadySorted;
- private String adapter;
- private Map<String, String> properties;
+ public static class CompiledLoadFromFileStatement implements ICompiledDmlStatement {
+ private String dataverseName;
+ private String datasetName;
+ private boolean alreadySorted;
+ private String adapter;
+ private Map<String, String> properties;
- public CompiledLoadFromFileStatement(String dataverseName,
- String datasetName, String adapter,
- Map<String, String> properties, boolean alreadySorted) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.alreadySorted = alreadySorted;
- this.adapter = adapter;
- this.properties = properties;
- }
+ public CompiledLoadFromFileStatement(String dataverseName, String datasetName, String adapter,
+ Map<String, String> properties, boolean alreadySorted) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.alreadySorted = alreadySorted;
+ this.adapter = adapter;
+ this.properties = properties;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
-
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public boolean alreadySorted() {
- return alreadySorted;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public String getAdapter() {
- return adapter;
- }
+ public boolean alreadySorted() {
+ return alreadySorted;
+ }
- public Map<String, String> getProperties() {
- return properties;
- }
+ public String getAdapter() {
+ return adapter;
+ }
- @Override
- public Kind getKind() {
- return Kind.LOAD_FROM_FILE;
- }
- }
+ public Map<String, String> getProperties() {
+ return properties;
+ }
- public static class CompiledInsertStatement implements
- ICompiledDmlStatement {
- private final String dataverseName;
- private final String datasetName;
- private final Query query;
- private final int varCounter;
+ @Override
+ public Kind getKind() {
+ return Kind.LOAD_FROM_FILE;
+ }
+ }
- public CompiledInsertStatement(String dataverseName,
- String datasetName, Query query, int varCounter) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.query = query;
- this.varCounter = varCounter;
- }
+ public static class CompiledInsertStatement implements ICompiledDmlStatement {
+ private final String dataverseName;
+ private final String datasetName;
+ private final Query query;
+ private final int varCounter;
- public String getDataverseName() {
- return dataverseName;
- }
+ public CompiledInsertStatement(String dataverseName, String datasetName, Query query, int varCounter) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.query = query;
+ this.varCounter = varCounter;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public Query getQuery() {
- return query;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- @Override
- public Kind getKind() {
- return Kind.INSERT;
- }
- }
+ public Query getQuery() {
+ return query;
+ }
- public static class CompiledBeginFeedStatement implements
- ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private Query query;
- private int varCounter;
+ @Override
+ public Kind getKind() {
+ return Kind.INSERT;
+ }
+ }
- public CompiledBeginFeedStatement(String dataverseName,
- String datasetName, Query query, int varCounter) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.query = query;
- this.varCounter = varCounter;
- }
+ public static class CompiledBeginFeedStatement implements ICompiledDmlStatement {
+ private String dataverseName;
+ private String datasetName;
+ private Query query;
+ private int varCounter;
- @Override
- public String getDataverseName() {
- return dataverseName;
- }
+ public CompiledBeginFeedStatement(String dataverseName, String datasetName, Query query, int varCounter) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.query = query;
+ this.varCounter = varCounter;
+ }
- @Override
- public String getDatasetName() {
- return datasetName;
- }
+ @Override
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ @Override
+ public String getDatasetName() {
+ return datasetName;
+ }
- public Query getQuery() {
- return query;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public void setQuery(Query query) {
- this.query = query;
- }
+ public Query getQuery() {
+ return query;
+ }
- @Override
- public Kind getKind() {
- return Kind.BEGIN_FEED;
- }
- }
+ public void setQuery(Query query) {
+ this.query = query;
+ }
- public static class CompiledControlFeedStatement implements
- ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private OperationType operationType;
- private Query query;
- private int varCounter;
- private Map<String, String> alteredParams;
+ @Override
+ public Kind getKind() {
+ return Kind.BEGIN_FEED;
+ }
+ }
- public CompiledControlFeedStatement(OperationType operationType,
- String dataverseName, String datasetName,
- Map<String, String> alteredParams) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.operationType = operationType;
- this.alteredParams = alteredParams;
- }
+ public static class CompiledControlFeedStatement implements ICompiledDmlStatement {
+ private String dataverseName;
+ private String datasetName;
+ private OperationType operationType;
+ private Query query;
+ private int varCounter;
+ private Map<String, String> alteredParams;
- @Override
- public String getDataverseName() {
- return dataverseName;
- }
+ public CompiledControlFeedStatement(OperationType operationType, String dataverseName, String datasetName,
+ Map<String, String> alteredParams) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.operationType = operationType;
+ this.alteredParams = alteredParams;
+ }
- @Override
- public String getDatasetName() {
- return datasetName;
- }
+ @Override
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public OperationType getOperationType() {
- return operationType;
- }
+ @Override
+ public String getDatasetName() {
+ return datasetName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public OperationType getOperationType() {
+ return operationType;
+ }
- public Query getQuery() {
- return query;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- @Override
- public Kind getKind() {
- return Kind.CONTROL_FEED;
- }
+ public Query getQuery() {
+ return query;
+ }
- public Map<String, String> getProperties() {
- return alteredParams;
- }
+ @Override
+ public Kind getKind() {
+ return Kind.CONTROL_FEED;
+ }
- public void setProperties(Map<String, String> properties) {
- this.alteredParams = properties;
- }
- }
+ public Map<String, String> getProperties() {
+ return alteredParams;
+ }
- public static class CompiledDeleteStatement implements
- ICompiledDmlStatement {
- private VariableExpr var;
- private String dataverseName;
- private String datasetName;
- private Expression condition;
- private Clause dieClause;
- private int varCounter;
- private AqlMetadataProvider metadataProvider;
+ public void setProperties(Map<String, String> properties) {
+ this.alteredParams = properties;
+ }
+ }
- public CompiledDeleteStatement(VariableExpr var, String dataverseName,
- String datasetName, Expression condition, Clause dieClause,
- int varCounter, AqlMetadataProvider metadataProvider) {
- this.var = var;
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.condition = condition;
- this.dieClause = dieClause;
- this.varCounter = varCounter;
- this.metadataProvider = metadataProvider;
- }
+ public static class CompiledDeleteStatement implements ICompiledDmlStatement {
+ private VariableExpr var;
+ private String dataverseName;
+ private String datasetName;
+ private Expression condition;
+ private int varCounter;
+ private AqlMetadataProvider metadataProvider;
- @Override
- public String getDatasetName() {
- return datasetName;
- }
+ public CompiledDeleteStatement(VariableExpr var, String dataverseName, String datasetName,
+ Expression condition, int varCounter, AqlMetadataProvider metadataProvider) {
+ this.var = var;
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.condition = condition;
+ this.varCounter = varCounter;
+ this.metadataProvider = metadataProvider;
+ }
- @Override
- public String getDataverseName() {
- return dataverseName;
- }
+ @Override
+ public String getDatasetName() {
+ return datasetName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ @Override
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public Expression getCondition() {
- return condition;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Clause getDieClause() {
- return dieClause;
- }
+ public Expression getCondition() {
+ return condition;
+ }
- public Query getQuery() throws AlgebricksException {
+ public Query getQuery() throws AlgebricksException {
- List<Expression> arguments = new ArrayList<Expression>();
- String arg = dataverseName == null ? datasetName : dataverseName
- + "." + datasetName;
- LiteralExpr argumentLiteral = new LiteralExpr(
- new StringLiteral(arg));
- arguments.add(argumentLiteral);
+ List<Expression> arguments = new ArrayList<Expression>();
+ String arg = dataverseName == null ? datasetName : dataverseName + "." + datasetName;
+ LiteralExpr argumentLiteral = new LiteralExpr(new StringLiteral(arg));
+ arguments.add(argumentLiteral);
- CallExpr callExpression = new CallExpr(new FunctionSignature(
- FunctionConstants.ASTERIX_NS, "dataset", 1), arguments);
- List<Clause> clauseList = new ArrayList<Clause>();
- Clause forClause = new ForClause(var, callExpression);
- clauseList.add(forClause);
- Clause whereClause = null;
- if (condition != null) {
- whereClause = new WhereClause(condition);
- clauseList.add(whereClause);
- }
- if (dieClause != null) {
- clauseList.add(dieClause);
- }
+ CallExpr callExpression = new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, "dataset", 1),
+ arguments);
+ List<Clause> clauseList = new ArrayList<Clause>();
+ Clause forClause = new ForClause(var, callExpression);
+ clauseList.add(forClause);
+ Clause whereClause = null;
+ if (condition != null) {
+ whereClause = new WhereClause(condition);
+ clauseList.add(whereClause);
+ }
- Dataset dataset = metadataProvider.findDataset(dataverseName,
- datasetName);
- if (dataset == null) {
- throw new AlgebricksException("Unknown dataset " + datasetName);
- }
- String itemTypeName = dataset.getItemTypeName();
- IAType itemType = metadataProvider.findType(
- dataset.getDataverseName(), itemTypeName);
- ARecordType recType = (ARecordType) itemType;
- String[] fieldNames = recType.getFieldNames();
- List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
- for (int i = 0; i < fieldNames.length; i++) {
- FieldAccessor fa = new FieldAccessor(var, new Identifier(
- fieldNames[i]));
- FieldBinding fb = new FieldBinding(new LiteralExpr(
- new StringLiteral(fieldNames[i])), fa);
- fieldBindings.add(fb);
- }
- RecordConstructor rc = new RecordConstructor(fieldBindings);
+ Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
+ if (dataset == null) {
+ throw new AlgebricksException("Unknown dataset " + datasetName);
+ }
+ String itemTypeName = dataset.getItemTypeName();
+ IAType itemType = metadataProvider.findType(dataset.getDataverseName(), itemTypeName);
+ ARecordType recType = (ARecordType) itemType;
+ String[] fieldNames = recType.getFieldNames();
+ List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
+ for (int i = 0; i < fieldNames.length; i++) {
+ FieldAccessor fa = new FieldAccessor(var, new Identifier(fieldNames[i]));
+ FieldBinding fb = new FieldBinding(new LiteralExpr(new StringLiteral(fieldNames[i])), fa);
+ fieldBindings.add(fb);
+ }
+ RecordConstructor rc = new RecordConstructor(fieldBindings);
- FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
- Query query = new Query();
- query.setBody(flowgr);
- return query;
- }
+ FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
+ Query query = new Query();
+ query.setBody(flowgr);
+ return query;
+ }
- @Override
- public Kind getKind() {
- return Kind.DELETE;
- }
+ @Override
+ public Kind getKind() {
+ return Kind.DELETE;
+ }
- }
+ }
}
diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index 40d6333..15af3bc 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -75,7 +75,6 @@
<!-- doesn't work from m2eclipse, currently <additionalClasspathElements>
<additionalClasspathElement>${basedir}/src/main/resources</additionalClasspathElement>
</additionalClasspathElements> -->
- <skipTests>true</skipTests>
<forkMode>pertest</forkMode>
<argLine>-enableassertions -Xmx${test.heap.size}m
-Dfile.encoding=UTF-8
@@ -184,12 +183,6 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>com.kenai.nbpwr</groupId>
- <artifactId>org-apache-commons-io</artifactId>
- <version>1.3.1-201002241208</version>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<version>0.20.2</version>
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index 2fc9525..428781f 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -22,7 +22,6 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.logging.Logger;
import org.json.JSONArray;
import org.json.JSONException;
@@ -60,7 +59,6 @@
import edu.uci.ics.asterix.aql.expression.TypeDropStatement;
import edu.uci.ics.asterix.aql.expression.WriteFromQueryResultStatement;
import edu.uci.ics.asterix.aql.expression.WriteStatement;
-import edu.uci.ics.asterix.common.config.AsterixProperties;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.common.config.GlobalConfig;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
@@ -125,6 +123,11 @@
*/
public class AqlTranslator extends AbstractAqlTranslator {
+ private enum ProgressState {
+ NO_PROGRESS,
+ ADDED_PENDINGOP_RECORD_TO_METADATA
+ }
+
private final List<Statement> aqlStatements;
private final PrintWriter out;
private final SessionConfig sessionConfig;
@@ -132,8 +135,6 @@
private Dataverse activeDefaultDataverse;
private List<FunctionDecl> declaredFunctions;
- private static Logger LOGGER = Logger.getLogger(AqlTranslator.class.getName());
-
public AqlTranslator(List<Statement> aqlStatements, PrintWriter out, SessionConfig pc, DisplayFormat pdf)
throws MetadataException, AsterixException {
this.aqlStatements = aqlStatements;
@@ -157,16 +158,12 @@
* Compiles and submits for execution a list of AQL statements.
*
* @param hcc
- * A Hyracks client connection that is used to submit a jobspec
- * to Hyracks.
+ * A Hyracks client connection that is used to submit a jobspec to Hyracks.
* @param hdc
- * A Hyracks dataset client object that is used to read the
- * results.
+ * A Hyracks dataset client object that is used to read the results.
* @param asyncResults
- * True if the results should be read asynchronously or false if
- * we should wait for results to be read.
- * @return A List<QueryResult> containing a QueryResult instance
- * corresponding to each submitted query.
+ * True if the results should be read asynchronously or false if we should wait for results to be read.
+ * @return A List<QueryResult> containing a QueryResult instance corresponding to each submitted query.
* @throws Exception
*/
public List<QueryResult> compileAndExecute(IHyracksClientConnection hcc, IHyracksDataset hdc, boolean asyncResults)
@@ -319,7 +316,7 @@
}
private Dataverse handleUseDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
- throws MetadataException, RemoteException, ACIDException {
+ throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -335,15 +332,14 @@
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return dv;
} catch (Exception e) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
throw new MetadataException(e);
} finally {
releaseReadLatch();
}
}
- private void handleCreateDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt)
- throws MetadataException, AlgebricksException, RemoteException, ACIDException {
+ private void handleCreateDataverseStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -353,14 +349,19 @@
CreateDataverseStatement stmtCreateDataverse = (CreateDataverseStatement) stmt;
String dvName = stmtCreateDataverse.getDataverseName().getValue();
Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
- if (dv != null && !stmtCreateDataverse.getIfNotExists()) {
- throw new AlgebricksException("A dataverse with this name " + dvName + " already exists.");
+ if (dv != null) {
+ if (stmtCreateDataverse.getIfNotExists()) {
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ return;
+ } else {
+ throw new AlgebricksException("A dataverse with this name " + dvName + " already exists.");
+ }
}
MetadataManager.INSTANCE.addDataverse(metadataProvider.getMetadataTxnContext(), new Dataverse(dvName,
stmtCreateDataverse.getFormat(), IMetadataEntity.PENDING_NO_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
throw new AlgebricksException(e);
} finally {
releaseWriteLatch();
@@ -370,6 +371,7 @@
private void handleCreateDatasetStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws AsterixException, Exception {
+ ProgressState progress = ProgressState.NO_PROGRESS;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -449,12 +451,12 @@
}
}
- // #. initialize DatasetIdFactory if it is not initialized.
+ //#. initialize DatasetIdFactory if it is not initialized.
if (!DatasetIdFactory.isInitialized()) {
DatasetIdFactory.initialize(MetadataManager.INSTANCE.getMostRecentDatasetId());
}
- // #. add a new dataset with PendingAddOp
+ //#. add a new dataset with PendingAddOp
dataset = new Dataset(dataverseName, datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType,
DatasetIdFactory.generateDatasetId(), IMetadataEntity.PENDING_ADD_OP);
MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
@@ -465,34 +467,37 @@
JobSpecification jobSpec = DatasetOperations.createDatasetJobSpec(dataverse, datasetName,
metadataProvider);
- // #. make metadataTxn commit before calling runJob.
+ //#. make metadataTxn commit before calling runJob.
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
+ progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
- // #. runJob
+ //#. runJob
runJob(hcc, jobSpec, true);
- // #. begin new metadataTxn
+ //#. begin new metadataTxn
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
}
- // #. add a new dataset with PendingNoOp after deleting the dataset
- // with PendingAddOp
+ //#. add a new dataset with PendingNoOp after deleting the dataset with PendingAddOp
MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName);
- MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), new Dataset(dataverseName,
- datasetName, itemTypeName, datasetDetails, dd.getHints(), dsType, dataset.getDatasetId(),
- IMetadataEntity.PENDING_NO_OP));
+ dataset.setPendingOp(IMetadataEntity.PENDING_NO_OP);
+ MetadataManager.INSTANCE.addDataset(metadataProvider.getMetadataTxnContext(), dataset);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
- if (dataset != null) {
- // #. execute compensation operations
- // remove the index in NC
+ if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+
+ //#. execute compensation operations
+ // remove the index in NC
+ // [Notice]
+ // As long as we updated (and committed) metadata, we should remove any effect of the job
+ // because an exception occurred during runJob.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -503,15 +508,14 @@
bActiveTxn = false;
runJob(hcc, jobSpec, true);
- } catch (Exception e3) {
+ } catch (Exception e2) {
+ e.addSuppressed(e2);
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e2, mdTxnCtx);
}
- // do no throw exception since still the metadata needs to
- // be compensated.
}
- // remove the record from the metadata.
+ // remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
@@ -519,8 +523,10 @@
datasetName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
- throw new AlgebricksException(e2);
+ e.addSuppressed(e2);
+ abort(e, e2, mdTxnCtx);
+ throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName
+ + "." + datasetName + ") couldn't be removed from the metadata", e);
}
}
@@ -533,6 +539,7 @@
private void handleCreateIndexStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
+ ProgressState progress = ProgressState.NO_PROGRESS;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -570,22 +577,21 @@
aRecordType.validateKeyFields(stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getIndexType());
if (idx != null) {
- if (!stmtCreateIndex.getIfNotExists()) {
- throw new AlgebricksException("An index with this name " + indexName + " already exists.");
- } else {
- stmtCreateIndex.setNeedToCreate(false);
+ if (stmtCreateIndex.getIfNotExists()) {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
return;
+ } else {
+ throw new AlgebricksException("An index with this name " + indexName + " already exists.");
}
}
- // #. add a new index with PendingAddOp
+ //#. add a new index with PendingAddOp
Index index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getGramLength(), false,
IMetadataEntity.PENDING_ADD_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
- // #. create the index artifact in NC.
+ //#. prepare to create the index artifact in NC.
CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName,
index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
spec = IndexOperations.buildSecondaryIndexCreationJobSpec(cis, metadataProvider);
@@ -595,14 +601,16 @@
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
+ progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+ //#. create the index artifact in NC.
runJob(hcc, spec, true);
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- // #. load data into the index in NC.
+ //#. load data into the index in NC.
cis = new CompiledCreateIndexStatement(index.getIndexName(), dataverseName, index.getDatasetName(),
index.getKeyFieldNames(), index.getGramLength(), index.getIndexType());
spec = IndexOperations.buildSecondaryIndexLoadingJobSpec(cis, metadataProvider);
@@ -611,29 +619,26 @@
runJob(hcc, spec, true);
- // #. begin new metadataTxn
+ //#. begin new metadataTxn
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- // #. add another new index with PendingNoOp after deleting the
- // index with PendingAddOp
+ //#. add another new index with PendingNoOp after deleting the index with PendingAddOp
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName, datasetName,
indexName);
- index = new Index(dataverseName, datasetName, indexName, stmtCreateIndex.getIndexType(),
- stmtCreateIndex.getFieldExprs(), stmtCreateIndex.getGramLength(), false,
- IMetadataEntity.PENDING_NO_OP);
+ index.setPendingOp(IMetadataEntity.PENDING_NO_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
- if (spec != null) {
- // #. execute compensation operations
- // remove the index in NC
+ if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+ //#. execute compensation operations
+ // remove the index in NC
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -644,15 +649,14 @@
bActiveTxn = false;
runJob(hcc, jobSpec, true);
- } catch (Exception e3) {
+ } catch (Exception e2) {
+ e.addSuppressed(e2);
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e2, mdTxnCtx);
}
- // do no throw exception since still the metadata needs to
- // be compensated.
}
- // remove the record from the metadata.
+ // remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
@@ -660,8 +664,10 @@
datasetName, indexName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
- throw new AlgebricksException(e2);
+ e.addSuppressed(e2);
+ abort(e, e2, mdTxnCtx);
+ throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName
+ + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
}
}
throw new AlgebricksException(e);
@@ -670,8 +676,7 @@
}
}
- private void handleCreateTypeStatement(AqlMetadataProvider metadataProvider, Statement stmt)
- throws AlgebricksException, RemoteException, ACIDException, MetadataException {
+ private void handleCreateTypeStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -707,7 +712,7 @@
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
throw new AlgebricksException(e);
} finally {
releaseWriteLatch();
@@ -717,58 +722,60 @@
private void handleDataverseDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
+ ProgressState progress = ProgressState.NO_PROGRESS;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireWriteLatch();
- String dvName = null;
+ String dataverseName = null;
List<JobSpecification> jobsToExecute = new ArrayList<JobSpecification>();
try {
DataverseDropStatement stmtDelete = (DataverseDropStatement) stmt;
- dvName = stmtDelete.getDataverseName().getValue();
+ dataverseName = stmtDelete.getDataverseName().getValue();
- Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dvName);
+ Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
if (dv == null) {
- if (!stmtDelete.getIfExists()) {
- throw new AlgebricksException("There is no dataverse with this name " + dvName + ".");
+ if (stmtDelete.getIfExists()) {
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ return;
+ } else {
+ throw new AlgebricksException("There is no dataverse with this name " + dataverseName + ".");
}
- MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- return;
}
- // #. prepare jobs which will drop corresponding datasets with
- // indexes.
- List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dvName);
+ //#. prepare jobs which will drop corresponding datasets with indexes.
+ List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverseName);
for (int j = 0; j < datasets.size(); j++) {
String datasetName = datasets.get(j).getDatasetName();
DatasetType dsType = datasets.get(j).getDatasetType();
if (dsType == DatasetType.INTERNAL || dsType == DatasetType.FEED) {
- List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dvName, datasetName);
+ List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName,
+ datasetName);
for (int k = 0; k < indexes.size(); k++) {
if (indexes.get(k).isSecondaryIndex()) {
- CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dvName, datasetName,
+ CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
indexes.get(k).getIndexName());
jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
}
}
- CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dvName, datasetName);
+ CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
}
}
- // #. mark PendingDropOp on the dataverse record by
- // first, deleting the dataverse record from the DATAVERSE_DATASET
- // second, inserting the dataverse record with the PendingDropOp
- // value into the DATAVERSE_DATASET
- MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dvName);
- MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dvName, dv.getDataFormat(),
+ //#. mark PendingDropOp on the dataverse record by
+ // first, deleting the dataverse record from the DATAVERSE_DATASET
+ // second, inserting the dataverse record with the PendingDropOp value into the DATAVERSE_DATASET
+ MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
+ MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverseName, dv.getDataFormat(),
IMetadataEntity.PENDING_DROP_OP));
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
+ progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
for (JobSpecification jobSpec : jobsToExecute) {
runJob(hcc, jobSpec, true);
@@ -778,33 +785,45 @@
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- // #. finally, delete the dataverse.
- MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dvName);
- if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName() == dvName) {
+ //#. finally, delete the dataverse.
+ MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
+ if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName() == dataverseName) {
activeDefaultDataverse = null;
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
- // #. execute compensation operations
- // remove the all indexes in NC
- for (JobSpecification jobSpec : jobsToExecute) {
- runJob(hcc, jobSpec, true);
- }
+ if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+ if (activeDefaultDataverse != null && activeDefaultDataverse.getDataverseName() == dataverseName) {
+ activeDefaultDataverse = null;
+ }
- // remove the record from the metadata.
- mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- metadataProvider.setMetadataTxnContext(mdTxnCtx);
- try {
- MetadataManager.INSTANCE.dropDataverse(metadataProvider.getMetadataTxnContext(), dvName);
- MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- } catch (Exception e2) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
- throw new AlgebricksException(e2);
+ //#. execute compensation operations
+ // remove all the indexes in NC
+ try {
+ for (JobSpecification jobSpec : jobsToExecute) {
+ runJob(hcc, jobSpec, true);
+ }
+ } catch (Exception e2) {
+ //do not throw the exception since the metadata still needs to be compensated.
+ e.addSuppressed(e2);
+ }
+
+ // remove the record from the metadata.
+ mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ try {
+ MetadataManager.INSTANCE.dropDataverse(mdTxnCtx, dataverseName);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ } catch (Exception e2) {
+ e.addSuppressed(e2);
+ abort(e, e2, mdTxnCtx);
+ throw new IllegalStateException("System is inconsistent state: pending dataverse(" + dataverseName
+ + ") couldn't be removed from the metadata", e);
+ }
}
throw new AlgebricksException(e);
@@ -816,6 +835,7 @@
private void handleDatasetDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
+ ProgressState progress = ProgressState.NO_PROGRESS;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -835,17 +855,18 @@
Dataset ds = MetadataManager.INSTANCE.getDataset(mdTxnCtx, dataverseName, datasetName);
if (ds == null) {
- if (!stmtDelete.getIfExists()) {
+ if (stmtDelete.getIfExists()) {
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ return;
+ } else {
throw new AlgebricksException("There is no dataset with this name " + datasetName
+ " in dataverse " + dataverseName + ".");
}
- MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- return;
}
if (ds.getDatasetType() == DatasetType.INTERNAL || ds.getDatasetType() == DatasetType.FEED) {
- // #. prepare jobs to drop the datatset and the indexes in NC
+ //#. prepare jobs to drop the dataset and the indexes in NC
List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx, dataverseName, datasetName);
for (int j = 0; j < indexes.size(); j++) {
if (indexes.get(j).isSecondaryIndex()) {
@@ -857,7 +878,7 @@
CompiledDatasetDropStatement cds = new CompiledDatasetDropStatement(dataverseName, datasetName);
jobsToExecute.add(DatasetOperations.createDropDatasetJobSpec(cds, metadataProvider));
- // #. mark the existing dataset as PendingDropOp
+ //#. mark the existing dataset as PendingDropOp
MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
MetadataManager.INSTANCE.addDataset(
mdTxnCtx,
@@ -866,8 +887,9 @@
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
+ progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
- // #. run the jobs
+ //#. run the jobs
for (JobSpecification jobSpec : jobsToExecute) {
runJob(hcc, jobSpec, true);
}
@@ -877,31 +899,40 @@
metadataProvider.setMetadataTxnContext(mdTxnCtx);
}
- // #. finally, delete the dataset.
+ //#. finally, delete the dataset.
MetadataManager.INSTANCE.dropDataset(mdTxnCtx, dataverseName, datasetName);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
- // #. execute compensation operations
- // remove the all indexes in NC
- for (JobSpecification jobSpec : jobsToExecute) {
- runJob(hcc, jobSpec, true);
- }
+ if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+ //#. execute compensation operations
+ // remove all the indexes in NC
+ try {
+ for (JobSpecification jobSpec : jobsToExecute) {
+ runJob(hcc, jobSpec, true);
+ }
+ } catch (Exception e2) {
+ //do not throw the exception since the metadata still needs to be compensated.
+ e.addSuppressed(e2);
+ }
- // remove the record from the metadata.
- mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- metadataProvider.setMetadataTxnContext(mdTxnCtx);
- try {
- MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
- datasetName);
- MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- } catch (Exception e2) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
- throw new AlgebricksException(e2);
+ // remove the record from the metadata.
+ mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ metadataProvider.setMetadataTxnContext(mdTxnCtx);
+ try {
+ MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(), dataverseName,
+ datasetName);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ } catch (Exception e2) {
+ e.addSuppressed(e2);
+ abort(e, e2, mdTxnCtx);
+ throw new IllegalStateException("System is inconsistent state: pending dataset(" + dataverseName
+ + "." + datasetName + ") couldn't be removed from the metadata", e);
+ }
}
throw new AlgebricksException(e);
@@ -913,6 +944,7 @@
private void handleIndexDropStatement(AqlMetadataProvider metadataProvider, Statement stmt,
IHyracksClientConnection hcc) throws Exception {
+ ProgressState progress = ProgressState.NO_PROGRESS;
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
boolean bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -941,38 +973,39 @@
indexName = stmtIndexDrop.getIndexName().getValue();
Index index = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, indexName);
if (index == null) {
- if (!stmtIndexDrop.getIfExists()) {
+ if (stmtIndexDrop.getIfExists()) {
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ return;
+ } else {
throw new AlgebricksException("There is no index with this name " + indexName + ".");
}
- } else {
- // #. prepare a job to drop the index in NC.
- CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName,
- indexName);
- jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
-
- // #. mark PendingDropOp on the existing index
- MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
- MetadataManager.INSTANCE.addIndex(
- mdTxnCtx,
- new Index(dataverseName, datasetName, indexName, index.getIndexType(), index
- .getKeyFieldNames(), index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
-
- // #. commit the existing transaction before calling runJob.
- MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- bActiveTxn = false;
-
- for (JobSpecification jobSpec : jobsToExecute) {
- runJob(hcc, jobSpec, true);
- }
-
- // #. begin a new transaction
- mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- bActiveTxn = true;
- metadataProvider.setMetadataTxnContext(mdTxnCtx);
-
- // #. finally, delete the existing index
- MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
}
+ //#. prepare a job to drop the index in NC.
+ CompiledIndexDropStatement cds = new CompiledIndexDropStatement(dataverseName, datasetName, indexName);
+ jobsToExecute.add(IndexOperations.buildDropSecondaryIndexJobSpec(cds, metadataProvider));
+
+ //#. mark PendingDropOp on the existing index
+ MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
+ MetadataManager.INSTANCE.addIndex(mdTxnCtx,
+ new Index(dataverseName, datasetName, indexName, index.getIndexType(),
+ index.getKeyFieldNames(), index.isPrimaryIndex(), IMetadataEntity.PENDING_DROP_OP));
+
+ //#. commit the existing transaction before calling runJob.
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ bActiveTxn = false;
+ progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
+
+ for (JobSpecification jobSpec : jobsToExecute) {
+ runJob(hcc, jobSpec, true);
+ }
+
+ //#. begin a new transaction
+ mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ bActiveTxn = true;
+ metadataProvider.setMetadataTxnContext(mdTxnCtx);
+
+ //#. finally, delete the existing index
+ MetadataManager.INSTANCE.dropIndex(mdTxnCtx, dataverseName, datasetName, indexName);
} else {
throw new AlgebricksException(datasetName
+ " is an external dataset. Indexes are not maintained for external datasets.");
@@ -981,25 +1014,34 @@
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
- // #. execute compensation operations
- // remove the all indexes in NC
- for (JobSpecification jobSpec : jobsToExecute) {
- runJob(hcc, jobSpec, true);
- }
+ if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
+ //#. execute compensation operations
+ // remove all the indexes in NC
+ try {
+ for (JobSpecification jobSpec : jobsToExecute) {
+ runJob(hcc, jobSpec, true);
+ }
+ } catch (Exception e2) {
+ //do not throw the exception since the metadata still needs to be compensated.
+ e.addSuppressed(e2);
+ }
- // remove the record from the metadata.
- mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- metadataProvider.setMetadataTxnContext(mdTxnCtx);
- try {
- MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
- datasetName, indexName);
- MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
- } catch (Exception e2) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
- throw new AlgebricksException(e2);
+ // remove the record from the metadata.
+ mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+ metadataProvider.setMetadataTxnContext(mdTxnCtx);
+ try {
+ MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), dataverseName,
+ datasetName, indexName);
+ MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+ } catch (Exception e2) {
+ e.addSuppressed(e2);
+ abort(e, e2, mdTxnCtx);
+ throw new IllegalStateException("System is inconsistent state: pending index(" + dataverseName
+ + "." + datasetName + "." + indexName + ") couldn't be removed from the metadata", e);
+ }
}
throw new AlgebricksException(e);
@@ -1009,8 +1051,7 @@
}
}
- private void handleTypeDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
- throws AlgebricksException, MetadataException, RemoteException, ACIDException {
+ private void handleTypeDropStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -1033,15 +1074,14 @@
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
throw new AlgebricksException(e);
} finally {
releaseWriteLatch();
}
}
- private void handleNodegroupDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
- throws MetadataException, AlgebricksException, RemoteException, ACIDException {
+ private void handleNodegroupDropStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -1060,15 +1100,14 @@
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
throw new AlgebricksException(e);
} finally {
releaseWriteLatch();
}
}
- private void handleCreateFunctionStatement(AqlMetadataProvider metadataProvider, Statement stmt)
- throws AlgebricksException, MetadataException, RemoteException, ACIDException {
+ private void handleCreateFunctionStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireWriteLatch();
@@ -1091,15 +1130,14 @@
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
throw new AlgebricksException(e);
} finally {
releaseWriteLatch();
}
}
- private void handleFunctionDropStatement(AqlMetadataProvider metadataProvider, Statement stmt)
- throws MetadataException, RemoteException, ACIDException, AlgebricksException {
+ private void handleFunctionDropStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
acquireWriteLatch();
@@ -1116,7 +1154,7 @@
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
throw new AlgebricksException(e);
} finally {
releaseWriteLatch();
@@ -1149,8 +1187,7 @@
if (!index.isSecondaryIndex()) {
continue;
}
- // Create CompiledCreateIndexStatement from metadata entity
- // 'index'.
+ // Create CompiledCreateIndexStatement from metadata entity 'index'.
CompiledCreateIndexStatement cis = new CompiledCreateIndexStatement(index.getIndexName(),
dataverseName, index.getDatasetName(), index.getKeyFieldNames(), index.getGramLength(),
index.getIndexType());
@@ -1164,7 +1201,7 @@
}
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
throw new AlgebricksException(e);
@@ -1196,7 +1233,7 @@
}
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
throw new AlgebricksException(e);
} finally {
@@ -1230,7 +1267,7 @@
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
throw new AlgebricksException(e);
} finally {
@@ -1252,8 +1289,8 @@
String dataverseName = stmtDelete.getDataverseName() == null ? activeDefaultDataverse == null ? null
: activeDefaultDataverse.getDataverseName() : stmtDelete.getDataverseName().getValue();
CompiledDeleteStatement clfrqs = new CompiledDeleteStatement(stmtDelete.getVariableExpr(), dataverseName,
- stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getDieClause(),
- stmtDelete.getVarCounter(), metadataProvider);
+ stmtDelete.getDatasetName().getValue(), stmtDelete.getCondition(), stmtDelete.getVarCounter(),
+ metadataProvider);
JobSpecification compiled = rewriteCompileQuery(metadataProvider, clfrqs.getQuery(), clfrqs);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
@@ -1265,7 +1302,7 @@
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
throw new AlgebricksException(e);
} finally {
@@ -1330,7 +1367,7 @@
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
throw new AlgebricksException(e);
} finally {
@@ -1360,7 +1397,7 @@
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
throw new AlgebricksException(e);
} finally {
@@ -1422,7 +1459,7 @@
return queryResult;
} catch (Exception e) {
if (bActiveTxn) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
}
throw new AlgebricksException(e);
} finally {
@@ -1430,8 +1467,7 @@
}
}
- private void handleCreateNodeGroupStatement(AqlMetadataProvider metadataProvider, Statement stmt)
- throws MetadataException, AlgebricksException, RemoteException, ACIDException {
+ private void handleCreateNodeGroupStatement(AqlMetadataProvider metadataProvider, Statement stmt) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -1454,7 +1490,7 @@
}
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
- MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ abort(e, e, mdTxnCtx);
throw new AlgebricksException(e);
} finally {
releaseWriteLatch();
@@ -1509,4 +1545,13 @@
private void releaseReadLatch() {
MetadataManager.INSTANCE.releaseReadLatch();
}
-}
+
+ private void abort(Exception rootE, Exception parentE, MetadataTransactionContext mdTxnCtx) {
+ try {
+ MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+ } catch (Exception e2) {
+ parentE.addSuppressed(e2);
+ throw new IllegalStateException(rootE);
+ }
+ }
+}
\ No newline at end of file
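For reference, a minimal standalone sketch of the compensation pattern the hunks above introduce: record how far the drop progressed, run the compensating NC jobs without masking the root cause, and fold secondary failures in as suppressed exceptions. ProgressState, MetadataTx, and Job here are simplified stand-ins, not the actual Asterix APIs.

import java.util.List;

public class PendingOpSketch {

    enum ProgressState { NO_PROGRESS, ADDED_PENDINGOP_RECORD_TO_METADATA }

    interface Job { void run() throws Exception; }

    interface MetadataTx { void commit() throws Exception; void abort() throws Exception; }

    // Two-phase drop: mark the metadata record as pending, commit, run the
    // physical drop jobs, then delete the record in a fresh transaction.
    static void dropWithCompensation(MetadataTx tx, List<Job> jobs, Runnable removeRecord) throws Exception {
        ProgressState progress = ProgressState.NO_PROGRESS;
        boolean bActiveTxn = true;
        try {
            // ... replace the record with a PENDING_DROP_OP copy here ...
            tx.commit();
            bActiveTxn = false;
            progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
            for (Job job : jobs) {
                job.run(); // drop the files on the NCs
            }
            // ... begin a new transaction and delete the record for good ...
        } catch (Exception e) {
            if (bActiveTxn) {
                abort(e, e, tx);
            }
            if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
                try {
                    for (Job job : jobs) {
                        job.run();
                    }
                } catch (Exception e2) {
                    // do not rethrow: the metadata still needs to be compensated
                    e.addSuppressed(e2);
                }
                removeRecord.run(); // compensate the metadata record
            }
            throw e;
        }
    }

    // Mirrors the new abort() helper: an abort failure must not hide the root cause.
    static void abort(Exception rootE, Exception parentE, MetadataTx tx) {
        try {
            tx.abort();
        } catch (Exception e2) {
            parentE.addSuppressed(e2);
            throw new IllegalStateException(rootE);
        }
    }
}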
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
index 25ef5da..1b8990b 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
@@ -79,8 +79,8 @@
}
private void setupWebServer() throws Exception {
- int port = Integer.parseInt((String) AsterixProperties.INSTANCE.getProperty(
- AsterixProperties.AsterixConfigurationKeys.WEB_INTERFACE_PORT, "" + DEFAULT_WEB_SERVER_PORT));
+ int port = Integer.parseInt((String) AsterixProperties.INSTANCE
+ .getProperty(AsterixProperties.AsterixConfigurationKeys.WEB_INTERFACE_PORT));
webServer = new Server(port);
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
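The hunk above reflects the new configurable-parameters support: the web interface port no longer falls back to a hard-coded default but is read straight from the Asterix configuration. A small sketch of the fail-fast style, with the property store stubbed as a plain map (hypothetical names, not the real AsterixProperties API):

import java.util.Map;

public class RequiredPropertySketch {

    // Reads a required integer property and fails fast instead of silently defaulting.
    static int requiredIntProperty(Map<String, String> config, String key) {
        String value = config.get(key);
        if (value == null) {
            throw new IllegalStateException("Missing required configuration parameter: " + key);
        }
        return Integer.parseInt(value.trim());
    }

    public static void main(String[] args) {
        Map<String, String> config = Map.of("web.interface.port", "19001");
        System.out.println("web server port: " + requiredIntProperty(config, "web.interface.port"));
    }
}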
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql
deleted file mode 100644
index 85616e4..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree.aql
+++ /dev/null
@@ -1,30 +0,0 @@
-drop dataverse test if exists;
-
-create dataverse test;
-use dataverse test;
-
-create type MyRecord as closed {
- id: int32,
- tweetid: int64,
- loc: point,
- time: datetime,
- text: string
-}
-
-create dataset MyData(MyRecord)
- primary key id;
-
-create index rtree_index on MyData(loc) type rtree;
-
-load dataset MyData
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/twitter/smalltweets.txt"),("format"="adm")) pre-sorted;
-
-delete $l from dataset MyData where spatial-intersect($l.loc, create-rectangle(create-point(0.0,-100.0), create-point(55.5,50.0))) die after 1000;
-
-write output to nc1:"rttest/failure_delete-rtree.adm";
-
-for $o in dataset('MyData')
-where spatial-intersect($o.loc, create-rectangle(create-point(0.0,-100.0), create-point(55.5,50.0)))
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql
deleted file mode 100644
index 381ad3f..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.1.ddl.aql
+++ /dev/null
@@ -1,19 +0,0 @@
-drop dataverse test if exists;
-
-create dataverse test;
-
-use dataverse test;
-
-create type MyRecord as closed {
- id: int32,
- tweetid: int64,
- loc: point,
- time: datetime,
- text: string
-}
-
-create dataset MyData(MyRecord)
- primary key id;
-
-create index rtree_index_loc on MyData(loc) type rtree;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.2.update.aql
deleted file mode 100644
index 3556905..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.2.update.aql
+++ /dev/null
@@ -1,8 +0,0 @@
-use dataverse test;
-
-load dataset MyData
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/twitter/smalltweets.txt"),("format"="adm")) pre-sorted;
-
-delete $l from dataset MyData where $l.id>=50 die after 1500;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.3.query.aql
deleted file mode 100644
index b33019b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete-rtree/delete-rtree.3.query.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-for $o in dataset('MyData')
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql
deleted file mode 100644
index 125fd99..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete.aql
+++ /dev/null
@@ -1,38 +0,0 @@
-drop dataverse test if exists;
-
-create dataverse test;
-use dataverse test;
-
-create type LineItemType as closed {
- l_orderkey: int32,
- l_partkey: int32,
- l_suppkey: int32,
- l_linenumber: int32,
- l_quantity: int32,
- l_extendedprice: double,
- l_discount: double,
- l_tax: double,
- l_returnflag: string,
- l_linestatus: string,
- l_shipdate: string,
- l_commitdate: string,
- l_receiptdate: string,
- l_shipinstruct: string,
- l_shipmode: string,
- l_comment: string
-}
-
-create dataset LineItem(LineItemType)
- primary key l_orderkey, l_linenumber;
-
-load dataset LineItem
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
-
-delete $l from dataset LineItem where $l.l_orderkey>=10 die after 1000;
-
-write output to nc1:"rttest/failure_delete.adm";
-for $c in dataset('LineItem')
-where $c.l_orderkey>=10
-order by $c.l_orderkey, $c.l_linenumber
-return $c
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.1.ddl.aql
deleted file mode 100644
index ea29045..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.1.ddl.aql
+++ /dev/null
@@ -1,28 +0,0 @@
-drop dataverse test if exists;
-
-create dataverse test;
-
-use dataverse test;
-
-create type LineItemType as closed {
- l_orderkey: int32,
- l_partkey: int32,
- l_suppkey: int32,
- l_linenumber: int32,
- l_quantity: int32,
- l_extendedprice: double,
- l_discount: double,
- l_tax: double,
- l_returnflag: string,
- l_linestatus: string,
- l_shipdate: string,
- l_commitdate: string,
- l_receiptdate: string,
- l_shipinstruct: string,
- l_shipmode: string,
- l_comment: string
-}
-
-create dataset LineItem(LineItemType)
- primary key l_orderkey, l_linenumber;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.2.update.aql
deleted file mode 100644
index ac2bd60..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.2.update.aql
+++ /dev/null
@@ -1,8 +0,0 @@
-use dataverse test;
-
-load dataset LineItem
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
-
-delete $l from dataset LineItem where $l.l_orderkey>=10 die after 1500;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.3.query.aql
deleted file mode 100644
index 9375d30..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/delete/delete.3.query.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse test;
-
-for $c in dataset('LineItem')
-where $c.l_orderkey>=10
-order by $c.l_orderkey, $c.l_linenumber
-return $c
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.1.ddl.aql
deleted file mode 100644
index 0cb052d..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.1.ddl.aql
+++ /dev/null
@@ -1,25 +0,0 @@
-drop dataverse test if exists;
-
-create dataverse test;
-
-use dataverse test;
-
-create type MyRecord as closed {
- id: int32,
- tweetid: int64,
- loc: point,
- time: datetime,
- text: string
-}
-
-create type MyMiniRecord as closed {
- id: int32,
- loc: point
-}
-
-create dataset MyData(MyRecord)
- primary key id;
-
-create dataset MyMiniData(MyMiniRecord)
- primary key id;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.2.update.aql
deleted file mode 100644
index bffd599..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.2.update.aql
+++ /dev/null
@@ -1,11 +0,0 @@
-use dataverse test;
-
-load dataset MyData
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/twitter/smalltweets.txt"),("format"="adm")) pre-sorted;
-
-
-load dataset MyMiniData
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/spatial/spatialData0.json"),("format"="adm")) pre-sorted;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.3.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.3.ddl.aql
deleted file mode 100644
index 3626933..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.3.ddl.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-create index rtree_index_loc_0 on MyData(loc) type rtree;
-create index rtree_index_loc on MyMiniData(loc) type rtree;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.4.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.4.update.aql
deleted file mode 100644
index 5874119..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.4.update.aql
+++ /dev/null
@@ -1,23 +0,0 @@
-use dataverse test;
-
-insert into dataset MyMiniData
-(
- for $m in dataset('MyData')
- where $m.id<1000
- return {
- "id": $m.id,
- "loc": $m.loc
- }
-);
-
-insert into dataset MyMiniData
-(
- for $m in dataset('MyData')
- where $m.id>=1000
- die after 1000
- return {
- "id": $m.id,
- "loc": $m.loc
- }
-);
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.5.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.5.query.aql
deleted file mode 100644
index 6c75ca2..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert-rtree/insert-rtree.5.query.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-for $o in dataset('MyMiniData')
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.1.ddl.aql
deleted file mode 100644
index 7f5844e..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.1.ddl.aql
+++ /dev/null
@@ -1,37 +0,0 @@
-drop dataverse test if exists;
-
-create dataverse test;
-
-use dataverse test;
-
-create type LineItemType as closed {
- l_orderkey: int32,
- l_partkey: int32,
- l_suppkey: int32,
- l_linenumber: int32,
- l_quantity: double,
- l_extendedprice: double,
- l_discount: double,
- l_tax: double,
- l_returnflag: string,
- l_linestatus: string,
- l_shipdate: string,
- l_commitdate: string,
- l_receiptdate: string,
- l_shipinstruct: string,
- l_shipmode: string,
- l_comment: string
-}
-
-create type LineIDType as closed {
- l_orderkey: int32,
- l_linenumber: int32,
- l_suppkey: int32
-}
-
-create dataset LineItem(LineItemType)
- primary key l_orderkey, l_linenumber;
-
-create dataset LineID(LineIDType)
- primary key l_orderkey, l_linenumber;
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.2.update.aql
deleted file mode 100644
index a1bfc0d..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.2.update.aql
+++ /dev/null
@@ -1,32 +0,0 @@
-use dataverse test;
-
-load dataset LineItem
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
-
-load dataset LineID
-using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="nc1://data/tpch0.001/lineitem_0.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
-
-insert into dataset LineID (
-for $l in dataset('LineItem')
- where $l.l_orderkey<1000
- return {
- "l_orderkey": $l.l_orderkey,
- "l_linenumber": $l.l_linenumber,
- "l_suppkey": $l.l_partkey
- }
-);
-
-insert into dataset LineID (
-for $l in dataset('LineItem')
- where $l.l_orderkey>=1000
- die after 1000
- return {
- "l_orderkey": $l.l_orderkey,
- "l_linenumber": $l.l_linenumber,
- "l_suppkey": $l.l_partkey
- }
-);
-
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.3.query.aql
deleted file mode 100644
index 4a3e056..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/insert/insert.3.query.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse test;
-
-for $c in dataset('LineID')
-order by $c.l_orderkey, $c.l_linenumber
-return $c
-
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree.aql
deleted file mode 100644
index a8d7d37..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree.aql
+++ /dev/null
@@ -1,8 +0,0 @@
-use dataverse test;
-
-write output to nc1:"rttest/failure_verify_delete-rtree.adm";
-
-for $o in dataset('MyData')
-where spatial-intersect($o.loc, create-rectangle(create-point(0.0,-100.0), create-point(55.5,50.0)))
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.1.ddl.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.1.ddl.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.2.update.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.2.update.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.3.query.aql
deleted file mode 100644
index b33019b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete-rtree/verify_delete-rtree.3.query.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-for $o in dataset('MyData')
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.1.ddl.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.1.ddl.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.2.update.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.2.update.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.3.query.aql
deleted file mode 100644
index 2a0f3e4..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_delete/verify_delete.3.query.aql
+++ /dev/null
@@ -1,6 +0,0 @@
-use dataverse test;
-
-for $c in dataset('LineItem')
-where $c.l_orderkey>=10
-order by $c.l_orderkey, $c.l_linenumber
-return $c
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.1.ddl.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.1.ddl.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.2.update.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.2.update.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.3.query.aql
deleted file mode 100644
index 6c75ca2..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert-rtree/verify_insert-rtree.3.query.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-for $o in dataset('MyMiniData')
-order by $o.id
-return {"id":$o.id}
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.1.ddl.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.1.ddl.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.2.update.aql
deleted file mode 100644
index e69de29..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.2.update.aql
+++ /dev/null
diff --git a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.3.query.aql
deleted file mode 100644
index 4cb52c5..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/failure/verify_insert/verify_insert.3.query.aql
+++ /dev/null
@@ -1,5 +0,0 @@
-use dataverse test;
-
-for $c in dataset('LineID')
-order by $c.l_orderkey, $c.l_linenumber
-return $c
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index dd29a7b..278d04a 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -1034,42 +1034,6 @@
</test-case>
</test-group>
<test-group name="failure">
- <test-case FilePath="failure">
- <compilation-unit name="delete-rtree">
- <output-dir compare="Text">delete-rtree</output-dir>
- <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
- </compilation-unit>
- <compilation-unit name="verify_delete-rtree">
- <output-dir compare="Text">delete-rtree</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="failure">
- <compilation-unit name="delete">
- <output-dir compare="Text">delete</output-dir>
- <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
- </compilation-unit>
- <compilation-unit name="verify_delete">
- <output-dir compare="Text">delete</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="failure">
- <compilation-unit name="insert-rtree">
- <output-dir compare="Text">insert-rtree</output-dir>
- <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
- </compilation-unit>
- <compilation-unit name="verify_insert-rtree">
- <output-dir compare="Text">insert-rtree</output-dir>
- </compilation-unit>
- </test-case>
- <test-case FilePath="failure">
- <compilation-unit name="insert">
- <output-dir compare="Text">insert</output-dir>
- <expected-error>edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
- </compilation-unit>
- <compilation-unit name="verify_insert">
- <output-dir compare="Text">insert</output-dir>
- </compilation-unit>
- </test-case>
<!--
<test-case FilePath="failure">
<compilation-unit name="q1_pricing_summary_report_failure">
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java
index 8be2d40..3e92653 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/base/Clause.java
@@ -9,7 +9,6 @@
WHERE_CLAUSE,
ORDER_BY_CLAUSE,
LIMIT_CLAUSE,
- DIE_CLAUSE,
GROUP_BY_CLAUSE,
DISTINCT_BY_CLAUSE,
UPDATE_CLAUSE
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java
index ffd0534..7a764ae 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/CreateIndexStatement.java
@@ -12,7 +12,6 @@
public class CreateIndexStatement implements Statement {
private Identifier indexName;
- private boolean needToCreate = true;
private Identifier dataverseName;
private Identifier datasetName;
private List<String> fieldExprs = new ArrayList<String>();
@@ -33,14 +32,6 @@
return gramLength;
}
- public void setNeedToCreate(boolean needToCreate) {
- this.needToCreate = needToCreate;
- }
-
- public boolean getNeedToCreate() {
- return needToCreate;
- }
-
public Identifier getIndexName() {
return indexName;
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
index 48b7909..b56d628 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DeleteStatement.java
@@ -1,6 +1,5 @@
package edu.uci.ics.asterix.aql.expression;
-import edu.uci.ics.asterix.aql.base.Clause;
import edu.uci.ics.asterix.aql.base.Expression;
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
@@ -13,16 +12,14 @@
private Identifier dataverseName;
private Identifier datasetName;
private Expression condition;
- private Clause dieClause;
private int varCounter;
public DeleteStatement(VariableExpr vars, Identifier dataverseName, Identifier datasetName, Expression condition,
- Clause dieClause, int varCounter) {
+ int varCounter) {
this.vars = vars;
this.dataverseName = dataverseName;
this.datasetName = datasetName;
this.condition = condition;
- this.dieClause = dieClause;
this.varCounter = varCounter;
}
@@ -47,10 +44,6 @@
return condition;
}
- public Clause getDieClause() {
- return dieClause;
- }
-
public int getVarCounter() {
return varCounter;
}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DieClause.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DieClause.java
deleted file mode 100644
index 3a77d58..0000000
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/DieClause.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package edu.uci.ics.asterix.aql.expression;
-
-import edu.uci.ics.asterix.aql.base.Clause;
-import edu.uci.ics.asterix.aql.base.Expression;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlExpressionVisitor;
-import edu.uci.ics.asterix.aql.expression.visitor.IAqlVisitorWithVoidReturn;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class DieClause implements Clause {
- private Expression expr;
-
- public DieClause() {
- }
-
- public DieClause(Expression dieexpr) {
- this.expr = dieexpr;
- }
-
- public Expression getDieExpr() {
- return expr;
- }
-
- public void setDieExpr(Expression dieexpr) {
- this.expr = dieexpr;
- }
-
- @Override
- public ClauseType getClauseType() {
- return ClauseType.DIE_CLAUSE;
- }
-
- @Override
- public <T> void accept(IAqlVisitorWithVoidReturn<T> visitor, T arg) throws AsterixException {
- visitor.visit(this, arg);
- }
-
- @Override
- public <R, T> R accept(IAqlExpressionVisitor<R, T> visitor, T arg) throws AsterixException {
- return visitor.visitDieClause(this, arg);
- }
-}
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java
index ab55e34..3506e61 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/AQLPrintVisitor.java
@@ -7,7 +7,6 @@
import edu.uci.ics.asterix.aql.base.Clause;
import edu.uci.ics.asterix.aql.base.Expression;
import edu.uci.ics.asterix.aql.base.Literal;
-import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.BeginFeedStatement;
import edu.uci.ics.asterix.aql.expression.CallExpr;
import edu.uci.ics.asterix.aql.expression.ControlFeedStatement;
@@ -18,7 +17,6 @@
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -284,12 +282,6 @@
}
@Override
- public void visit(DieClause lc, Integer step) throws AsterixException {
- out.println(skip(step) + "Limit");
- lc.getDieExpr().accept(this, step + 1);
- }
-
- @Override
public void visit(FunctionDecl fd, Integer step) throws AsterixException {
out.println(skip(step) + "FunctionDecl " + fd.getSignature().getName() + "(" + fd.getParamList().toString()
+ ") {");
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java
index 6c8b844..1f8e980 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlExpressionVisitor.java
@@ -10,7 +10,6 @@
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -141,7 +140,7 @@
R visitSetStatement(SetStatement ss, T arg) throws AsterixException;
R visitBeginFeedStatement(BeginFeedStatement bf, T arg) throws AsterixException;
-
+
R visitControlFeedStatement(ControlFeedStatement del, T arg) throws AsterixException;
R visitCallExpr(CallExpr pf, T arg) throws AsterixException;
@@ -150,8 +149,6 @@
R visitWriteFromQueryResultStatement(WriteFromQueryResultStatement stmtLoad, T arg) throws AsterixException;
- R visitDieClause(DieClause stmtLoad, T arg) throws AsterixException;
-
R visit(CreateFunctionStatement cfs, T arg) throws AsterixException;
R visitFunctionDropStatement(FunctionDropStatement del, T arg) throws AsterixException;
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java
index d4891ac..ff04032 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/expression/visitor/IAqlVisitorWithVoidReturn.java
@@ -10,7 +10,6 @@
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -143,13 +142,11 @@
void visit(DataverseDropStatement stmtDel, T arg) throws AsterixException;
void visit(TypeDropStatement stmtDel, T arg) throws AsterixException;
-
+
void visit(BeginFeedStatement stmtDel, T arg) throws AsterixException;
void visit(ControlFeedStatement stmtDel, T arg) throws AsterixException;
- void visit(DieClause stmtDel, T arg) throws AsterixException;
-
void visit(CreateFunctionStatement cfs, T arg) throws AsterixException;
void visit(FunctionDropStatement fds, T arg) throws AsterixException;
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java
index 62e40fa..84d8321 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/AqlRewriter.java
@@ -20,7 +20,6 @@
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -343,12 +342,6 @@
}
@Override
- public Void visitDieClause(DieClause lc, Void arg) throws AsterixException {
- lc.getDieExpr().accept(this, arg);
- return null;
- }
-
- @Override
public Void visitListConstructor(ListConstructor lc, Void arg) throws AsterixException {
for (Expression e : lc.getExprList()) {
e.accept(this, arg);
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java
index 5742ac6..cbd15a2 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/CloneAndSubstituteVariablesVisitor.java
@@ -17,7 +17,6 @@
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -187,7 +186,7 @@
} else {
// it is a var. from the context
var = context.getRewrittenVar(v.getVar().getId());
- if(var == null){
+ if (var == null) {
var = v.getVar();
}
}
@@ -257,14 +256,6 @@
}
@Override
- public Pair<IAqlExpression, List<VariableSubstitution>> visitDieClause(DieClause lc, List<VariableSubstitution> arg)
- throws AsterixException {
- Pair<IAqlExpression, List<VariableSubstitution>> p1 = lc.getDieExpr().accept(this, arg);
- DieClause c = new DieClause((Expression) p1.first);
- return new Pair<IAqlExpression, List<VariableSubstitution>>(c, arg);
- }
-
- @Override
public Pair<IAqlExpression, List<VariableSubstitution>> visitListConstructor(ListConstructor lc,
List<VariableSubstitution> arg) throws AsterixException {
List<Expression> oldExprList = lc.getExprList();
diff --git a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java
index f178d88..9834f27 100644
--- a/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java
+++ b/asterix-aql/src/main/java/edu/uci/ics/asterix/aql/rewrites/InlineUdfsVisitor.java
@@ -18,7 +18,6 @@
import edu.uci.ics.asterix.aql.expression.DataverseDecl;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
import edu.uci.ics.asterix.aql.expression.DeleteStatement;
-import edu.uci.ics.asterix.aql.expression.DieClause;
import edu.uci.ics.asterix.aql.expression.DistinctClause;
import edu.uci.ics.asterix.aql.expression.DropStatement;
import edu.uci.ics.asterix.aql.expression.FLWOGRExpression;
@@ -104,15 +103,15 @@
public Boolean visitRecordConstructor(RecordConstructor rc, List<FunctionDecl> arg) throws AsterixException {
boolean changed = false;
for (FieldBinding b : rc.getFbList()) {
- Pair<Boolean, Expression> leftExprInlined = inlineUdfsInExpr(b.getLeftExpr(), arg);
- b.setLeftExpr(leftExprInlined.second);
- changed = changed | leftExprInlined.first;
- Pair<Boolean, Expression> rightExprInlined = inlineUdfsInExpr(b.getRightExpr(), arg);
- b.setRightExpr(rightExprInlined.second);
- changed = changed | rightExprInlined.first;
-
- /*
- if (b.getLeftExpr().accept(this, arg)) {
+ Pair<Boolean, Expression> leftExprInlined = inlineUdfsInExpr(b.getLeftExpr(), arg);
+ b.setLeftExpr(leftExprInlined.second);
+ changed = changed | leftExprInlined.first;
+ Pair<Boolean, Expression> rightExprInlined = inlineUdfsInExpr(b.getRightExpr(), arg);
+ b.setRightExpr(rightExprInlined.second);
+ changed = changed | rightExprInlined.first;
+
+ /*
+ if (b.getLeftExpr().accept(this, arg)) {
changed = true;
}
if (b.getRightExpr().accept(this, arg)) {
@@ -251,13 +250,6 @@
}
@Override
- public Boolean visitDieClause(DieClause lc, List<FunctionDecl> arg) throws AsterixException {
- Pair<Boolean, Expression> p1 = inlineUdfsInExpr(lc.getDieExpr(), arg);
- lc.setDieExpr(p1.second);
- return p1.first;
- }
-
- @Override
public Boolean visitUnaryExpr(UnaryExpr u, List<FunctionDecl> arg) throws AsterixException {
return u.getExpr().accept(this, arg);
}
diff --git a/asterix-aql/src/main/javacc/AQL.jj b/asterix-aql/src/main/javacc/AQL.jj
index be7d954..68e1497 100644
--- a/asterix-aql/src/main/javacc/AQL.jj
+++ b/asterix-aql/src/main/javacc/AQL.jj
@@ -270,7 +270,6 @@
Identifier dataverseName;
Identifier datasetName = null;
Expression condition = null;
- Clause dieClause = null;
Pair<Identifier, Identifier> nameComponents;
}
{
@@ -280,8 +279,8 @@
{
nameComponents = getDotSeparatedPair();
}
- ("where" condition = Expression())? (dieClause = DieClause())? (";")?
- {return new DeleteStatement(var, nameComponents.first, nameComponents.second, condition, dieClause, getVarCounter()); }
+ ("where" condition = Expression())? (";")?
+ {return new DeleteStatement(var, nameComponents.first, nameComponents.second, condition, getVarCounter()); }
}
UpdateStatement UpdateStatement() throws ParseException:
@@ -2174,7 +2173,6 @@
| clause = GroupClause()
| clause = LimitClause()
| clause = DistinctClause()
- | clause = DieClause()
)
{
return clause;
@@ -2408,21 +2406,6 @@
}
}
-DieClause DieClause() throws ParseException:
-{
- DieClause lc = new DieClause();
- Expression expr;
- pushForbiddenScope(getCurrentScope());
-}
-{
- "die" "after" expr = Expression() { lc.setDieExpr(expr); }
- {
- popForbiddenScope();
- return lc;
- }
-}
-
-
QuantifiedExpression QuantifiedExpression()throws ParseException:
{
QuantifiedExpression qc = new QuantifiedExpression();
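With the die clause gone from the grammar, a delete statement is now built from just the bound variable, the target dataset, an optional where condition, and the variable counter. A compact mirror of the simplified shape, using stand-in types rather than the real AQL classes:

public class DeleteStatementSketch {

    // Stand-ins for the real Identifier/Expression AST types.
    record Identifier(String value) {}
    record Expression(String text) {}

    // Post-change constructor shape: no Clause dieClause parameter.
    record DeleteStatement(Identifier dataverseName, Identifier datasetName, Expression condition, int varCounter) {}

    public static void main(String[] args) {
        DeleteStatement stmt = new DeleteStatement(new Identifier("test"), new Identifier("LineItem"),
                new Expression("$l.l_orderkey >= 10"), 0);
        System.out.println(stmt);
    }
}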
diff --git a/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
new file mode 100644
index 0000000..7e9b08f
--- /dev/null
+++ b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
@@ -0,0 +1,428 @@
+package edu.uci.ics.asterix.test.aql;
+
+import static org.junit.Assert.fail;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+import java.util.logging.Logger;
+
+import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
+import org.apache.commons.httpclient.HttpClient;
+import org.apache.commons.httpclient.HttpStatus;
+import org.apache.commons.httpclient.NameValuePair;
+import org.apache.commons.httpclient.methods.GetMethod;
+import org.apache.commons.httpclient.params.HttpMethodParams;
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
+import edu.uci.ics.asterix.testframework.context.TestCaseContext;
+import edu.uci.ics.asterix.testframework.context.TestFileContext;
+import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+
+public class TestsUtils {
+
+ private static final String EXTENSION_AQL_RESULT = "adm";
+ private static final Logger LOGGER = Logger.getLogger(TestsUtils.class
+ .getName());
+ private static Method managixExecuteMethod = null;
+
+ /**
+ * Probably does not work well with symlinks.
+ */
+ public static boolean deleteRec(File path) {
+ if (path.isDirectory()) {
+ for (File f : path.listFiles()) {
+ if (!deleteRec(f)) {
+ return false;
+ }
+ }
+ }
+ return path.delete();
+ }
+
+ public static void runScriptAndCompareWithResult(File scriptFile,
+ PrintWriter print, File expectedFile, File actualFile)
+ throws Exception {
+ BufferedReader readerExpected = new BufferedReader(
+ new InputStreamReader(new FileInputStream(expectedFile),
+ "UTF-8"));
+ BufferedReader readerActual = new BufferedReader(new InputStreamReader(
+ new FileInputStream(actualFile), "UTF-8"));
+ String lineExpected, lineActual;
+ int num = 1;
+ try {
+ while ((lineExpected = readerExpected.readLine()) != null) {
+ lineActual = readerActual.readLine();
+ // Assert.assertEquals(lineExpected, lineActual);
+ if (lineActual == null) {
+ if (lineExpected.isEmpty()) {
+ continue;
+ }
+ throw new Exception("Result for " + scriptFile
+ + " changed at line " + num + ":\n< "
+ + lineExpected + "\n> ");
+ }
+
+ if (!equalStrings(lineExpected.split("Timestamp")[0],
+ lineActual.split("Timestamp")[0])) {
+ fail("Result for " + scriptFile + " changed at line " + num
+ + ":\n< " + lineExpected + "\n> " + lineActual);
+ }
+
+ ++num;
+ }
+ lineActual = readerActual.readLine();
+ // Assert.assertEquals(null, lineActual);
+ if (lineActual != null) {
+ throw new Exception("Result for " + scriptFile
+ + " changed at line " + num + ":\n< \n> " + lineActual);
+ }
+ // actualFile.delete();
+ } finally {
+ readerExpected.close();
+ readerActual.close();
+ }
+
+ }
+
+ private static boolean equalStrings(String s1, String s2) {
+ String[] rowsOne = s1.split("\n");
+ String[] rowsTwo = s2.split("\n");
+
+ if (rowsOne.length != rowsTwo.length) {
+ return false;
+ }
+
+ for (int i = 0; i < rowsOne.length; i++) {
+ String row1 = rowsOne[i];
+ String row2 = rowsTwo[i];
+
+ if (row1.equals(row2)) {
+ continue;
+ }
+
+ String[] fields1 = row1.split(" ");
+ String[] fields2 = row2.split(" ");
+
+ if (fields1.length != fields2.length) {
+ return false;
+ }
+
+ for (int j = 0; j < fields1.length; j++) {
+ if (fields1[j].equals(fields2[j])) {
+ continue;
+ } else if (fields1[j].indexOf('.') < 0) {
+ return false;
+ } else {
+ // Strip a trailing comma, then compare the numeric
+ // values at float precision.
+ fields1[j] = fields1[j].split(",")[0];
+ fields2[j] = fields2[j].split(",")[0];
+ try {
+ float float1 = (float) Double.parseDouble(fields1[j]);
+ float float2 = (float) Double.parseDouble(fields2[j]);
+ if (float1 != float2) {
+ return false;
+ }
+ } catch (NumberFormatException e) {
+ return false;
+ }
+ }
+ }
+ }
+ return true;
+ }
+
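+ /**
+ * Maps a test file name to its expected-result file name, e.g.
+ * "q01.aql" becomes "q01.adm".
+ */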
+ public static String aqlExtToResExt(String fname) {
+ int dot = fname.lastIndexOf('.');
+ return fname.substring(0, dot + 1) + EXTENSION_AQL_RESULT;
+ }
+
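+ /**
+ * Writes the records of a JSON query response to actualFile, one
+ * record per line.
+ */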
+ public static void writeResultsToFile(File actualFile, JSONObject result)
+ throws IOException, JSONException {
+ BufferedWriter writer = new BufferedWriter(new FileWriter(actualFile));
+ try {
+ Results res = new Results(result);
+ for (String line : res) {
+ writer.write(line);
+ writer.newLine();
+ }
+ } finally {
+ // Close the writer even if extracting the results fails.
+ writer.close();
+ }
+ }
+
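+ /**
+ * Iterable view over the "results" array of a JSON query response,
+ * usable as: for (String record : new Results(response)) { ... }
+ */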
+ public static class Results implements Iterable<String> {
+ private final JSONArray chunks;
+
+ public Results(JSONObject result) throws JSONException {
+ chunks = result.getJSONArray("results");
+ }
+
+ public Iterator<String> iterator() {
+ return new ResultIterator(chunks);
+ }
+ }
+
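+ /**
+ * Flattens the two-level response structure (an array of result
+ * chunks, each an array of records) into a single stream of trimmed
+ * record strings.
+ */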
+ public static class ResultIterator implements Iterator<String> {
+ private final JSONArray chunks;
+
+ private int chunkCounter = 0;
+ private int recordCounter = 0;
+
+ public ResultIterator(JSONArray chunks) {
+ this.chunks = chunks;
+ }
+
+ @Override
+ public boolean hasNext() {
+ JSONArray resultArray;
+ try {
+ resultArray = chunks.getJSONArray(chunkCounter);
+ if (resultArray.getString(recordCounter) != null) {
+ return true;
+ }
+ } catch (JSONException e) {
+ // Indexing past the end of either array throws, which
+ // signals that the iteration is exhausted.
+ return false;
+ }
+ return false;
+ }
+
+ @Override
+ public String next() throws NoSuchElementException {
+ JSONArray resultArray;
+ String item = "";
+
+ try {
+ resultArray = chunks.getJSONArray(chunkCounter);
+ item = resultArray.getString(recordCounter);
+ if (item == null) {
+ throw new NoSuchElementException();
+ }
+ item = item.trim();
+
+ recordCounter++;
+ if (recordCounter >= resultArray.length()) {
+ chunkCounter++;
+ recordCounter = 0;
+ }
+ } catch (JSONException e) {
+ throw new NoSuchElementException(e.getMessage());
+ }
+ return item;
+ }
+
+ @Override
+ public void remove() {
+ throw new NotImplementedException();
+ }
+ }
+
+ // Executes a query via the REST API and returns the response as a JSONObject.
+ public static JSONObject executeQuery(String str) throws Exception {
+
+ final String url = "http://localhost:19101/query";
+
+ // Create an instance of HttpClient.
+ HttpClient client = new HttpClient();
+
+ // Create a method instance.
+ GetMethod method = new GetMethod(url);
+
+ method.setQueryString(new NameValuePair[] { new NameValuePair("query",
+ str) });
+
+ // Provide a custom retry handler, if necessary.
+ method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
+ new DefaultHttpMethodRetryHandler(3, false));
+
+ JSONObject result = null;
+
+ try {
+ // Execute the method.
+ int statusCode = client.executeMethod(method);
+
+ // Check if the method was executed successfully.
+ if (statusCode != HttpStatus.SC_OK) {
+ System.err.println("Method failed: " + method.getStatusLine());
+ }
+
+ // Read the response body as String.
+ String responseBody = method.getResponseBodyAsString();
+
+ result = new JSONObject(responseBody);
+ } catch (Exception e) {
+ // Report the failure; the caller will receive a null result.
+ e.printStackTrace();
+ }
+ return result;
+ }
+
+ // Executes update statements (insert and delete) via the REST API.
+ public static void executeUpdate(String str) throws Exception {
+ final String url = "http://localhost:19101/update";
+
+ // Create an instance of HttpClient.
+ HttpClient client = new HttpClient();
+
+ // Create a method instance.
+ GetMethod method = new GetMethod(url);
+
+ method.setQueryString(new NameValuePair[] { new NameValuePair(
+ "statements", str) });
+
+ // Provide a custom retry handler, if necessary.
+ method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
+ new DefaultHttpMethodRetryHandler(3, false));
+
+ // Execute the method.
+ int statusCode = client.executeMethod(method);
+
+ // Check if the method was executed successfully.
+ if (statusCode != HttpStatus.SC_OK) {
+ System.err.println("Method failed: " + method.getStatusLine());
+ }
+ }
+
+ // Executes DDL statements via the REST API: create type, create
+ // dataset, create index, create dataverse, and create function.
+ public static void executeDDL(String str) throws Exception {
+ final String url = "http://localhost:19101/ddl";
+
+ // Create an instance of HttpClient.
+ HttpClient client = new HttpClient();
+
+ // Create a method instance.
+ GetMethod method = new GetMethod(url);
+
+ method.setQueryString(new NameValuePair[] { new NameValuePair("ddl",
+ str) });
+
+ // Provide a custom retry handler, if necessary.
+ method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER,
+ new DefaultHttpMethodRetryHandler(3, false));
+
+ // Execute the method.
+ int statusCode = client.executeMethod(method);
+
+ // Check if the method was executed successfully.
+ if (statusCode != HttpStatus.SC_OK) {
+ System.err.println("Method failed: " + method.getStatusLine());
+ }
+ }
+
+ // Reads a DDL/update/query file and returns its contents as a string,
+ // which is later passed to the REST API for execution.
+ public static String readTestFile(File testFile) throws Exception {
+ BufferedReader reader = new BufferedReader(new FileReader(testFile));
+ String line = null;
+ StringBuilder stringBuilder = new StringBuilder();
+ String ls = System.getProperty("line.separator");
+
+ try {
+ while ((line = reader.readLine()) != null) {
+ stringBuilder.append(line);
+ stringBuilder.append(ls);
+ }
+ } finally {
+ // Close the reader so test files do not leak file handles.
+ reader.close();
+ }
+
+ return stringBuilder.toString();
+ }
+
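+ /**
+ * Runs a managix command through the installer integration utility,
+ * which is loaded reflectively on first use.
+ */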
+ public static void executeManagixCommand(String command)
+ throws ClassNotFoundException, NoSuchMethodException,
+ SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
+ if (managixExecuteMethod == null) {
+ // Look the utility class up reflectively so this module has no
+ // compile-time dependency on the installer tests.
+ Class<?> clazz = Class
+ .forName("edu.uci.ics.asterix.installer.test.AsterixInstallerIntegrationUtil");
+ managixExecuteMethod = clazz.getMethod("executeCommand",
+ String.class);
+ }
+ managixExecuteMethod.invoke(null, command);
+ }
+
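+ /**
+ * Driver for a single test case: walks every compilation unit of the
+ * test, dispatches each test file by type (ddl, update, query, mgx),
+ * and checks query output against the expected results.
+ */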
+ public static void executeTest(String actualPath,
+ TestCaseContext testCaseCtx) throws Exception {
+
+ File testFile;
+ File expectedResultFile;
+ String statement;
+ List<TestFileContext> expectedResultFileCtxs;
+ List<TestFileContext> testFileCtxs;
+
+ int queryCount = 0;
+ JSONObject result;
+
+ List<CompilationUnit> cUnits = testCaseCtx.getTestCase()
+ .getCompilationUnit();
+ for (CompilationUnit cUnit : cUnits) {
+ LOGGER.info("[TEST]: " + testCaseCtx.getTestCase().getFilePath()
+ + "/" + cUnit.getName());
+
+ testFileCtxs = testCaseCtx.getTestFiles(cUnit);
+ expectedResultFileCtxs = testCaseCtx.getExpectedResultFiles(cUnit);
+
+ for (TestFileContext ctx : testFileCtxs) {
+ testFile = ctx.getFile();
+ statement = TestsUtils.readTestFile(testFile);
+ try {
+ switch (ctx.getType()) {
+ case "ddl":
+ TestsUtils.executeDDL(statement);
+ break;
+ case "update":
+ TestsUtils.executeUpdate(statement);
+ break;
+ case "query":
+ result = TestsUtils.executeQuery(statement);
+ if (!cUnit.getExpectedError().isEmpty()) {
+ if (!result.has("error")) {
+ throw new Exception("Test \"" + testFile
+ + "\" FAILED!");
+ }
+ } else {
+ expectedResultFile = expectedResultFileCtxs.get(
+ queryCount).getFile();
+
+ File actualFile = new File(actualPath
+ + File.separator
+ + testCaseCtx.getTestCase().getFilePath()
+ .replace(File.separator, "_") + "_"
+ + cUnit.getName() + ".adm");
+ // Make sure the directory for the actual output
+ // exists before writing to it.
+ actualFile.getParentFile().mkdirs();
+
+ File actualResultFile = testCaseCtx
+ .getActualResultFile(cUnit, new File(
+ actualPath));
+ actualResultFile.getParentFile().mkdirs();
+
+ TestsUtils.writeResultsToFile(actualFile, result);
+
+ TestsUtils.runScriptAndCompareWithResult(testFile,
+ new PrintWriter(System.err),
+ expectedResultFile, actualFile);
+ }
+ queryCount++;
+ break;
+ case "mgx":
+ executeManagixCommand(statement);
+ break;
+ default:
+ throw new IllegalArgumentException(
+ "Unknown statement type: " + ctx.getType());
+ }
+ } catch (Exception e) {
+ if (cUnit.getExpectedError().isEmpty()) {
+ throw new Exception(
+ "Test \"" + testFile + "\" FAILED!", e);
+ }
+ }
+ }
+ }
+
+ }
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
index 91a18f3..070e6ea 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Dataset.java
@@ -40,7 +40,7 @@
private final Map<String, String> hints;
private final int datasetId;
// Type of pending operations with respect to atomic DDL operation
- private final int pendingOp;
+ private int pendingOp;
public Dataset(String dataverseName, String datasetName, String itemTypeName, IDatasetDetails datasetDetails,
Map<String, String> hints, DatasetType datasetType, int datasetId, int pendingOp) {
@@ -86,6 +86,10 @@
return pendingOp;
}
+ public void setPendingOp(int pendingOp) {
+ this.pendingOp = pendingOp;
+ }
+
@Override
public Object addToCache(MetadataCache cache) {
return cache.addDatasetIfNotExists(this);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
index 6d65730..aa29e5b 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/Index.java
@@ -15,7 +15,6 @@
package edu.uci.ics.asterix.metadata.entities;
-import java.io.Serializable;
import java.util.List;
import edu.uci.ics.asterix.common.config.DatasetConfig.IndexType;
@@ -46,7 +45,7 @@
// Specific to NGRAM indexes.
private final int gramLength;
// Type of pending operations with respect to atomic DDL operation
- private final int pendingOp;
+ private int pendingOp;
public Index(String dataverseName, String datasetName, String indexName, IndexType indexType,
List<String> keyFieldNames, int gramLength, boolean isPrimaryIndex, int pendingOp) {
@@ -103,6 +102,10 @@
public int getPendingOp() {
return pendingOp;
}
+
+ public void setPendingOp(int pendingOp) {
+ this.pendingOp = pendingOp;
+ }
public boolean isSecondaryIndex() {
return !isPrimaryIndex();