Merged asterix_stabilization r1006:r1115.
git-svn-id: https://asterixdb.googlecode.com/svn/branches/asterix_fix_issue_224@1117 eaa15691-b419-025a-1212-ee371bd00084
diff --git a/asterix-algebra/pom.xml b/asterix-algebra/pom.xml
index f950800..a6f0ac4 100644
--- a/asterix-algebra/pom.xml
+++ b/asterix-algebra/pom.xml
@@ -5,10 +5,7 @@
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-algebra</artifactId>
- <version>0.0.4-SNAPSHOT</version>
-
<build>
<plugins>
<plugin>
@@ -60,8 +57,7 @@
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-algebricks-compiler</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <artifactId>algebricks-compiler</artifactId>
</dependency>
<dependency>
<groupId>org.json</groupId>
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/FuzzyUtils.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/FuzzyUtils.java
index 89d78f2..baf16c2 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/FuzzyUtils.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/FuzzyUtils.java
@@ -35,6 +35,7 @@
return AsterixBuiltinFunctions.COUNTHASHED_WORD_TOKENS;
case UNORDEREDLIST:
case ORDEREDLIST:
+ case ANY:
return null;
default:
throw new NotImplementedException("No tokenizer for type " + inputTag);
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
index d33029b..f63cc0c 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/base/RuleCollections.java
@@ -28,7 +28,9 @@
import edu.uci.ics.asterix.optimizer.rules.FuzzyEqRule;
import edu.uci.ics.asterix.optimizer.rules.FuzzyJoinRule;
import edu.uci.ics.asterix.optimizer.rules.IfElseToSwitchCaseFunctionRule;
+import edu.uci.ics.asterix.optimizer.rules.InlineUnnestFunctionRule;
import edu.uci.ics.asterix.optimizer.rules.IntroduceDynamicTypeCastRule;
+import edu.uci.ics.asterix.optimizer.rules.IntroduceEnforcedListTypeRule;
import edu.uci.ics.asterix.optimizer.rules.IntroduceSecondaryIndexInsertDeleteRule;
import edu.uci.ics.asterix.optimizer.rules.IntroduceStaticTypeCastRule;
import edu.uci.ics.asterix.optimizer.rules.LoadRecordFieldsRule;
@@ -71,6 +73,7 @@
import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceAggregateCombinerRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceGroupByCombinerRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceGroupByForSubplanRule;
+import edu.uci.ics.hyracks.algebricks.rewriter.rules.IntroduceProjectsRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.IsolateHyracksOperatorsRule;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PullSelectOutOfEqJoin;
import edu.uci.ics.hyracks.algebricks.rewriter.rules.PushAssignBelowUnionAllRule;
@@ -95,6 +98,7 @@
public final static List<IAlgebraicRewriteRule> buildTypeInferenceRuleCollection() {
List<IAlgebraicRewriteRule> typeInfer = new LinkedList<IAlgebraicRewriteRule>();
+ typeInfer.add(new InlineUnnestFunctionRule());
typeInfer.add(new InferTypesRule());
return typeInfer;
}
@@ -106,14 +110,15 @@
normalization.add(new BreakSelectIntoConjunctsRule());
normalization.add(new ExtractGbyExpressionsRule());
normalization.add(new ExtractDistinctByExpressionsRule());
- normalization.add(new ExtractOrderExpressionsRule());
+ normalization.add(new ExtractOrderExpressionsRule());
normalization.add(new ExtractCommonExpressionsRule());
-
+
// IntroduceStaticTypeCastRule should go before
// IntroduceDynamicTypeCastRule to
// avoid unnecessary dynamic casting
normalization.add(new IntroduceStaticTypeCastRule());
normalization.add(new IntroduceDynamicTypeCastRule());
+ normalization.add(new IntroduceEnforcedListTypeRule());
normalization.add(new ConstantFoldingRule());
normalization.add(new UnnestToDataScanRule());
normalization.add(new IfElseToSwitchCaseFunctionRule());
@@ -124,7 +129,7 @@
public final static List<IAlgebraicRewriteRule> buildCondPushDownAndJoinInferenceRuleCollection() {
List<IAlgebraicRewriteRule> condPushDownAndJoinInference = new LinkedList<IAlgebraicRewriteRule>();
-
+
condPushDownAndJoinInference.add(new PushSelectDownRule());
condPushDownAndJoinInference.add(new PushDieUpRule());
condPushDownAndJoinInference.add(new RemoveRedundantListifyRule());
@@ -138,17 +143,17 @@
condPushDownAndJoinInference.add(new IntroduceGroupByForSubplanRule());
condPushDownAndJoinInference.add(new SubplanOutOfGroupRule());
condPushDownAndJoinInference.add(new InsertOuterJoinRule());
-
+
condPushDownAndJoinInference.add(new RemoveRedundantVariablesRule());
condPushDownAndJoinInference.add(new AsterixInlineVariablesRule());
condPushDownAndJoinInference.add(new RemoveUnusedAssignAndAggregateRule());
-
+
condPushDownAndJoinInference.add(new FactorRedundantGroupAndDecorVarsRule());
condPushDownAndJoinInference.add(new PushAggregateIntoGroupbyRule());
condPushDownAndJoinInference.add(new EliminateSubplanRule());
condPushDownAndJoinInference.add(new PushProperJoinThroughProduct());
condPushDownAndJoinInference.add(new PushGroupByThroughProduct());
- condPushDownAndJoinInference.add(new NestGroupByRule());
+ condPushDownAndJoinInference.add(new NestGroupByRule());
return condPushDownAndJoinInference;
}
@@ -164,7 +169,7 @@
fieldLoads.add(new RemoveUnusedAssignAndAggregateRule());
fieldLoads.add(new ConstantFoldingRule());
fieldLoads.add(new FeedScanCollectionToUnnest());
- fieldLoads.add(new ComplexJoinInferenceRule());
+ fieldLoads.add(new ComplexJoinInferenceRule());
return fieldLoads;
}
@@ -187,7 +192,7 @@
consolidation.add(new RemoveRedundantGroupByDecorVars());
return consolidation;
}
-
+
public final static List<IAlgebraicRewriteRule> buildAccessMethodRuleCollection() {
List<IAlgebraicRewriteRule> accessMethod = new LinkedList<IAlgebraicRewriteRule>();
accessMethod.add(new IntroduceSelectAccessMethodRule());
@@ -200,7 +205,7 @@
}
public final static List<IAlgebraicRewriteRule> buildPlanCleanupRuleCollection() {
- List<IAlgebraicRewriteRule> planCleanupRules = new LinkedList<IAlgebraicRewriteRule>();
+ List<IAlgebraicRewriteRule> planCleanupRules = new LinkedList<IAlgebraicRewriteRule>();
planCleanupRules.add(new PushAssignBelowUnionAllRule());
planCleanupRules.add(new ExtractCommonExpressionsRule());
planCleanupRules.add(new RemoveRedundantVariablesRule());
@@ -225,7 +230,6 @@
physicalRewritesAllLevels.add(new IntroHashPartitionMergeExchange());
physicalRewritesAllLevels.add(new SetClosedRecordConstructorsRule());
physicalRewritesAllLevels.add(new PullPositionalVariableFromUnnestRule());
- //physicalRewritesAllLevels.add(new IntroduceProjectRule());
physicalRewritesAllLevels.add(new PushProjectDownRule());
physicalRewritesAllLevels.add(new InsertProjectBeforeUnionRule());
physicalRewritesAllLevels.add(new InlineSingleReferenceVariablesRule());
@@ -240,8 +244,9 @@
List<IAlgebraicRewriteRule> physicalRewritesTopLevel = new LinkedList<IAlgebraicRewriteRule>();
physicalRewritesTopLevel.add(new PushNestedOrderByUnderPreSortedGroupByRule());
physicalRewritesTopLevel.add(new PushLimitDownRule());
- //physicalRewritesTopLevel.add(new IntroduceProjectsRule());
- //physicalRewritesTopLevel.add(new SetAlgebricksPhysicalOperatorsRule());
+ physicalRewritesTopLevel.add(new IntroduceProjectsRule());
+ physicalRewritesTopLevel.add(new SetAlgebricksPhysicalOperatorsRule());
+ physicalRewritesTopLevel.add(new SetExecutionModeRule());
return physicalRewritesTopLevel;
}
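
For orientation: each build*RuleCollection() method above simply returns an ordered list of IAlgebraicRewriteRule instances that the compiler applies as a group. The following is an illustrative sketch only (plain Java with a toy Rule interface and a string-valued "plan", not the Algebricks controller API) of how such an ordered collection is typically driven to a fixpoint.

import java.util.List;

// Toy stand-ins for IAlgebraicRewriteRule and a logical plan; not the Algebricks API.
interface Rule {
    boolean rewrite(StringBuilder plan); // returns true if it changed the plan
}

class RuleCollectionRunner {
    // Apply an ordered rule collection over and over until no rule fires.
    static void runToFixpoint(List<Rule> rules, StringBuilder plan) {
        boolean changed;
        do {
            changed = false;
            for (Rule rule : rules) {
                changed |= rule.rewrite(plan);
            }
        } while (changed);
    }

    public static void main(String[] args) {
        StringBuilder plan = new StringBuilder("assign-assign-assign-scan");
        // Hypothetical rule: merge two adjacent assigns into one.
        Rule mergeAssigns = p -> {
            int i = p.indexOf("assign-assign");
            if (i < 0) {
                return false;
            }
            p.replace(i, i + "assign-assign".length(), "assign");
            return true;
        };
        runToFixpoint(List.of(mergeAssigns), plan);
        System.out.println(plan); // assign-scan
    }
}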
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
index c1d0fea..bc61c49 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/ConstantFoldingRule.java
@@ -28,6 +28,7 @@
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFamilyProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryIntegerInspector;
import edu.uci.ics.asterix.formats.nontagged.AqlPrinterFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
@@ -37,7 +38,10 @@
import edu.uci.ics.asterix.om.base.IAObject;
import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AbstractCollectionType;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
@@ -103,8 +107,8 @@
private static final JobGenContext _jobGenCtx = new JobGenContext(null, null, null,
AqlSerializerDeserializerProvider.INSTANCE, AqlBinaryHashFunctionFactoryProvider.INSTANCE,
- AqlBinaryComparatorFactoryProvider.INSTANCE, AqlTypeTraitProvider.INSTANCE,
- AqlBinaryBooleanInspectorImpl.FACTORY, AqlBinaryIntegerInspector.FACTORY,
+ AqlBinaryHashFunctionFamilyProvider.INSTANCE, AqlBinaryComparatorFactoryProvider.INSTANCE,
+ AqlTypeTraitProvider.INSTANCE, AqlBinaryBooleanInspectorImpl.FACTORY, AqlBinaryIntegerInspector.FACTORY,
AqlPrinterFactoryProvider.INSTANCE, AqlNullWriterFactory.INSTANCE, null,
new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(AqlLogicalExpressionJobGen.INSTANCE),
AqlExpressionTypeComputer.INSTANCE, AqlNullableTypeComputer.INSTANCE, null, null, null,
@@ -163,11 +167,23 @@
if (!checkArgs(expr)) {
return new Pair<Boolean, ILogicalExpression>(changed, expr);
}
- // TODO: currently ARecord is always a closed record
+ // The current ARecord SerDe assumes a closed record, so we do not constant fold open record constructors
if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)
|| expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.CAST_RECORD)) {
return new Pair<Boolean, ILogicalExpression>(false, null);
}
+ // The current list SerDe assumes a strongly typed list, so we do not constant fold list constructors that are not strongly typed
+ if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR)
+ || expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR)) {
+ AbstractCollectionType listType = (AbstractCollectionType) TypeComputerUtilities.getRequiredType(expr);
+ if (listType != null
+ && (listType.getItemType().getTypeTag() == ATypeTag.ANY || listType.getItemType() instanceof AbstractCollectionType)) {
+ // do not fold when the required item type is ANY or is itself a nested list
+ // (for example, a nested list inside a list<ANY>), because the list SerDe
+ // needs a concrete, non-nested item type to build the constant
+ return new Pair<Boolean, ILogicalExpression>(false, null);
+ }
+ }
if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME)) {
ARecordType rt = (ARecordType) _emptyTypeEnv.getType(expr.getArguments().get(0).getValue());
String str = ((AString) ((AsterixConstantValue) ((ConstantExpression) expr.getArguments().get(1)
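
For illustration only (not part of the patch): a self-contained Java sketch, using a hypothetical TypeTag enum instead of the Asterix type system, of the guard added above. Constant folding of a list constructor is skipped unless the required item type is concrete and not itself a collection.

// Hypothetical TypeTag enum; the real check uses ATypeTag and AbstractCollectionType.
enum TypeTag { ANY, STRING, INT32, ORDEREDLIST, UNORDEREDLIST }

class ListFoldGuard {
    // Folding relies on the list SerDe knowing a concrete, non-nested item type.
    static boolean canFold(TypeTag requiredItemType) {
        return requiredItemType != TypeTag.ANY
                && requiredItemType != TypeTag.ORDEREDLIST
                && requiredItemType != TypeTag.UNORDEREDLIST;
    }

    public static void main(String[] args) {
        System.out.println(canFold(TypeTag.STRING));      // true: strongly typed item
        System.out.println(canFold(TypeTag.ANY));         // false: open item type
        System.out.println(canFold(TypeTag.ORDEREDLIST)); // false: nested list
    }
}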
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyEqRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyEqRule.java
index 77c719d..345f68a 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyEqRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyEqRule.java
@@ -99,9 +99,16 @@
inputExprTypes.add(t.getTypeTag());
} else if (inputExp.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
// Hack to make sure that we will add the func call as is, without wrapping a tokenizer around.
- inputTypeTag = ATypeTag.UNORDEREDLIST;
+ IAType type = (IAType) context.getExpressionTypeComputer().getType(inputExp, metadataProvider, env);
+ inputTypeTag = type.getTypeTag();
+ // Only auto-tokenize strings.
+ if (inputTypeTag == ATypeTag.STRING) {
+ // Strings will be auto-tokenized.
+ inputTypeTag = ATypeTag.UNORDEREDLIST;
+ } else {
+ useExprAsIs = true;
+ }
inputExprTypes.add(inputTypeTag);
- useExprAsIs = true;
} else if (inputExp.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
ConstantExpression inputConst = (ConstantExpression) inputExp;
AsterixConstantValue constVal = (AsterixConstantValue) inputConst.getValue();
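
A compact standalone sketch (hypothetical Tag enum, not the Asterix optimizer classes) of the decision the change above introduces: only string-typed inputs to a fuzzy-eq are auto-tokenized and treated as an unordered list of tokens, while any other function-call input is now used as-is.

class FuzzyEqInputClassifier {
    // Hypothetical stand-in for ATypeTag.
    enum Tag { STRING, UNORDEREDLIST, ORDEREDLIST, INT32 }

    static final class Classification {
        final Tag effectiveTag;     // type tag recorded for the input expression
        final boolean useExprAsIs;  // true when no tokenizer is wrapped around it

        Classification(Tag effectiveTag, boolean useExprAsIs) {
            this.effectiveTag = effectiveTag;
            this.useExprAsIs = useExprAsIs;
        }
    }

    static Classification classify(Tag inferredTag) {
        if (inferredTag == Tag.STRING) {
            // strings get a tokenizer, so downstream sees an unordered list of tokens
            return new Classification(Tag.UNORDEREDLIST, false);
        }
        // everything else is passed through without a tokenizer
        return new Classification(inferredTag, true);
    }

    public static void main(String[] args) {
        System.out.println(classify(Tag.STRING).useExprAsIs);        // false
        System.out.println(classify(Tag.UNORDEREDLIST).useExprAsIs); // true
    }
}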
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java
index 035f1df..8936425 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/FuzzyJoinRule.java
@@ -191,6 +191,10 @@
List<LogicalVariable> leftInputPKs = context.findPrimaryKey(leftInputVar);
List<LogicalVariable> rightInputPKs = context.findPrimaryKey(rightInputVar);
+ // Bail if primary keys could not be inferred.
+ if (leftInputPKs == null || rightInputPKs == null) {
+ return false;
+ }
// primary key has only one variable
if (leftInputPKs.size() != 1 || rightInputPKs.size() != 1) {
return false;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/InlineUnnestFunctionRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/InlineUnnestFunctionRule.java
new file mode 100644
index 0000000..3e57ded
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/InlineUnnestFunctionRule.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule inlines unnest functions that are held in variables.
+ * It was introduced to fix issue 201.
+ */
+public class InlineUnnestFunctionRule implements IAlgebraicRewriteRule {
+
+ @Override
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ return false;
+ }
+
+ @Override
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
+ AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+ if (context.checkIfInDontApplySet(this, op1))
+ return false;
+ context.addToDontApplySet(this, op1);
+ if (op1.getOperatorTag() != LogicalOperatorTag.UNNEST)
+ return false;
+ UnnestOperator unnestOperator = (UnnestOperator) op1;
+ AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) unnestOperator.getExpressionRef()
+ .getValue();
+ // only apply the rule when the unnest expression is a scan-collection call
+ if (expr.getFunctionIdentifier() != AsterixBuiltinFunctions.SCAN_COLLECTION)
+ return false;
+
+ // inline all variables from an unnesting function call
+ AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+ List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+ for (int i = 0; i < args.size(); i++) {
+ ILogicalExpression argExpr = args.get(i).getValue();
+ if (argExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+ VariableReferenceExpression varExpr = (VariableReferenceExpression) argExpr;
+ inlineVariable(varExpr.getVariableReference(), unnestOperator);
+ }
+ }
+ return true;
+ }
+
+ /**
+ * This method inlines one variable.
+ *
+ * @param usedVar
+ * A variable that is used by the scan-collection function in the unnest operator
+ * @param unnestOp
+ * The unnest operator.
+ * @throws AlgebricksException
+ */
+ private void inlineVariable(LogicalVariable usedVar, UnnestOperator unnestOp) throws AlgebricksException {
+ AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) unnestOp.getExpressionRef().getValue();
+ List<Pair<AbstractFunctionCallExpression, Integer>> parentAndIndexList = new ArrayList<Pair<AbstractFunctionCallExpression, Integer>>();
+ getParentFunctionExpression(usedVar, expr, parentAndIndexList);
+ ILogicalExpression usedVarOrginExpr = findUsedVarOrigin(usedVar, unnestOp, (AbstractLogicalOperator) unnestOp
+ .getInputs().get(0).getValue());
+ if (usedVarOrginExpr != null) {
+ for (Pair<AbstractFunctionCallExpression, Integer> parentAndIndex : parentAndIndexList) {
+ // we only rewrite the top-level scan-collection function
+ if (parentAndIndex.first.getFunctionIdentifier() == AsterixBuiltinFunctions.SCAN_COLLECTION
+ && parentAndIndex.first == expr) {
+ unnestOp.getExpressionRef().setValue(usedVarOrginExpr);
+ }
+ }
+ }
+ }
+
+ private void getParentFunctionExpression(LogicalVariable usedVar, ILogicalExpression expr,
+ List<Pair<AbstractFunctionCallExpression, Integer>> parentAndIndexList) {
+ AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
+ List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+ for (int i = 0; i < args.size(); i++) {
+ ILogicalExpression argExpr = args.get(i).getValue();
+ if (argExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+ VariableReferenceExpression varExpr = (VariableReferenceExpression) argExpr;
+ if (varExpr.getVariableReference().equals(usedVar))
+ parentAndIndexList.add(new Pair<AbstractFunctionCallExpression, Integer>(funcExpr, i));
+ }
+ if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ getParentFunctionExpression(usedVar, argExpr, parentAndIndexList);
+ }
+ }
+ }
+
+ private ILogicalExpression findUsedVarOrigin(LogicalVariable usedVar, AbstractLogicalOperator parentOp,
+ AbstractLogicalOperator currentOp) throws AlgebricksException {
+ ILogicalExpression ret = null;
+ if (currentOp.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+ List<LogicalVariable> producedVars = new ArrayList<LogicalVariable>();
+ VariableUtilities.getProducedVariables(currentOp, producedVars);
+ if (producedVars.contains(usedVar)) {
+ AssignOperator assignOp = (AssignOperator) currentOp;
+ int index = assignOp.getVariables().indexOf(usedVar);
+ ILogicalExpression returnedExpr = assignOp.getExpressions().get(index).getValue();
+ if (returnedExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) returnedExpr;
+ if (AsterixBuiltinFunctions.isBuiltinUnnestingFunction(funcExpr.getFunctionIdentifier())) {
+ // we only inline built-in unnesting functions
+ removeUnecessaryAssign(parentOp, currentOp, assignOp, index);
+ ret = returnedExpr;
+ }
+ } else if (returnedExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+ // recursively inline
+ VariableReferenceExpression varExpr = (VariableReferenceExpression) returnedExpr;
+ LogicalVariable var = varExpr.getVariableReference();
+ ILogicalExpression finalExpr = findUsedVarOrigin(var, currentOp,
+ (AbstractLogicalOperator) currentOp.getInputs().get(0).getValue());
+ if (finalExpr != null) {
+ removeUnecessaryAssign(parentOp, currentOp, assignOp, index);
+ ret = finalExpr;
+ }
+ }
+ }
+ } else {
+ for (Mutable<ILogicalOperator> child : currentOp.getInputs()) {
+ ILogicalExpression expr = findUsedVarOrigin(usedVar, currentOp,
+ (AbstractLogicalOperator) child.getValue());
+ if (expr != null) {
+ ret = expr;
+ }
+ }
+ }
+ return ret;
+ }
+
+ private void removeUnecessaryAssign(AbstractLogicalOperator parentOp, AbstractLogicalOperator currentOp,
+ AssignOperator assignOp, int index) {
+ assignOp.getVariables().remove(index);
+ assignOp.getExpressions().remove(index);
+ if (assignOp.getVariables().size() == 0) {
+ int opIndex = parentOp.getInputs().indexOf(new MutableObject<ILogicalOperator>(currentOp));
+ parentOp.getInputs().get(opIndex).setValue(assignOp.getInputs().get(0).getValue());
+ }
+ }
+}
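
A toy model (strings and a map instead of Algebricks operators and expressions; the dataset name is made up) of the transformation InlineUnnestFunctionRule performs: an UNNEST over scan-collection($v), where $v is produced by an ASSIGN whose expression is an unnesting function, ends up unnesting that expression directly, and the emptied ASSIGN is spliced out.

import java.util.LinkedHashMap;
import java.util.Map;

class InlineUnnestSketch {
    public static void main(String[] args) {
        // hypothetical plan fragment: assign $v := dataset('Customers'); unnest $x <- scan-collection($v)
        Map<String, String> assigns = new LinkedHashMap<>();
        assigns.put("$v", "dataset('Customers')");

        String unnestExpr = "scan-collection($v)";
        String usedVar = unnestExpr.substring(unnestExpr.indexOf('(') + 1, unnestExpr.indexOf(')'));

        String originExpr = assigns.get(usedVar);
        if (originExpr != null) {
            unnestExpr = originExpr;  // inline the defining unnesting expression
            assigns.remove(usedVar);  // the assign entry is no longer needed
        }
        System.out.println(unnestExpr); // dataset('Customers')
        System.out.println(assigns);    // {}
    }
}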
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
index c41d908..2dce5f6 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
@@ -48,22 +48,18 @@
* recursive way. It enables: 1. bag-based fields in a record, 2. bidirectional
* cast of a open field and a matched closed field, and 3. put in null fields
* when necessary.
- *
* Here is an example: A record { "hobby": {{"music", "coding"}}, "id": "001",
* "name": "Person Three"} which confirms to closed type ( id: string, name:
* string, hobby: {{string}}? ) can be cast to an open type (id: string ), or
* vice versa.
- *
* However, if the input record is a variable, then we don't know its exact
* field layout at compile time. For example, records conforming to the same
* type can have different field orderings and different open parts. That's why
* we need dynamic type casting.
- *
* Note that as we can see in the example, the ordering of fields of a record is
* not required. Since the open/closed part of a record has completely different
* underlying memory/storage layout, a cast-record function will change the
* layout as specified at runtime.
- *
* Implementation wise, this rule checks the target dataset type and the input
* record type, and if the types are different, then it plugs in an assign with
* a cast-record function, and projects away the original (uncast) field.
@@ -80,9 +76,7 @@
throws AlgebricksException {
/**
* pattern match: sink insert assign
- *
* resulting plan: sink-insert-project-assign
- *
*/
AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
if (op1.getOperatorTag() != LogicalOperatorTag.SINK)
@@ -90,6 +84,9 @@
AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
if (op2.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE)
return false;
+ InsertDeleteOperator insertDeleteOp = (InsertDeleteOperator) op2;
+ if (insertDeleteOp.getOperation() == InsertDeleteOperator.Kind.DELETE)
+ return false;
AbstractLogicalOperator op3 = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
if (op3.getOperatorTag() != LogicalOperatorTag.ASSIGN)
return false;
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java
new file mode 100644
index 0000000..2e8d530
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules;
+
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.optimizer.rules.typecast.StaticTypeCastUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractAssignOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractUnnestOperator;
+import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
+
+/**
+ * This rule enforces types for function expressions that contain list constructor function calls.
+ * The list constructor is special because a nested list is inferred as type List<ANY>.
+ * The bottom-up type inference (InferTypesRule in Algebricks) does not infer the enforced type, so this rule enforces it explicitly.
+ * To preserve the generality of Algebricks, the enforcement is done here, in an ASTERIX rule, rather than in Algebricks itself.
+ */
+public class IntroduceEnforcedListTypeRule implements IAlgebraicRewriteRule {
+
+ @Override
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ return false;
+ }
+
+ @Override
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
+ if (context.checkIfInDontApplySet(this, opRef.getValue()))
+ return false;
+ AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+ context.addToDontApplySet(this, opRef.getValue());
+ boolean changed = false;
+
+ /**
+ * rewrite list constructor types for list constructor functions
+ */
+ if (op1.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
+ AbstractAssignOperator assignOp = (AbstractAssignOperator) op1;
+ List<Mutable<ILogicalExpression>> expressions = assignOp.getExpressions();
+ IVariableTypeEnvironment env = assignOp.computeOutputTypeEnvironment(context);
+ changed = rewriteExpressions(expressions, env);
+ }
+ if (op1.getOperatorTag() == LogicalOperatorTag.UNNEST) {
+ AbstractUnnestOperator unnestOp = (AbstractUnnestOperator) op1;
+ List<Mutable<ILogicalExpression>> expressions = Collections.singletonList(unnestOp.getExpressionRef());
+ IVariableTypeEnvironment env = unnestOp.computeOutputTypeEnvironment(context);
+ changed = rewriteExpressions(expressions, env);
+ }
+ return changed;
+ }
+
+ private boolean rewriteExpressions(List<Mutable<ILogicalExpression>> expressions, IVariableTypeEnvironment env)
+ throws AlgebricksException {
+ boolean changed = false;
+ for (Mutable<ILogicalExpression> exprRef : expressions) {
+ ILogicalExpression expr = exprRef.getValue();
+ if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) expr;
+ IAType exprType = (IAType) env.getType(argFuncExpr);
+ changed = changed || StaticTypeCastUtil.rewriteListExpr(argFuncExpr, exprType, exprType, env);
+ }
+ }
+ return changed;
+ }
+
+}
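
A minimal sketch (hypothetical type tags, not edu.uci.ics.asterix.om.types) of the idea behind IntroduceEnforcedListTypeRule and StaticTypeCastUtil.rewriteListExpr: when a list constructor's required type is unknown (ANY), a default open list type is enforced so the runtime list builder sees a concrete type.

class EnforcedListTypeSketch {
    // Hypothetical stand-ins for ATypeTag values.
    enum TypeTag { ANY, ORDEREDLIST, UNORDEREDLIST, STRING }

    static TypeTag enforce(TypeTag required, boolean orderedConstructor) {
        if (required == TypeTag.ANY) {
            // fall back to a default open list type, analogous to
            // DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE / NESTED_OPEN_AUNORDERED_LIST_TYPE
            return orderedConstructor ? TypeTag.ORDEREDLIST : TypeTag.UNORDEREDLIST;
        }
        return required; // already concrete: keep it
    }

    public static void main(String[] args) {
        System.out.println(enforce(TypeTag.ANY, true));     // ORDEREDLIST
        System.out.println(enforce(TypeTag.STRING, false)); // STRING
    }
}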
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastRule.java
index 7dc35fe..3aae2dd 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/IntroduceStaticTypeCastRule.java
@@ -16,26 +16,14 @@
package edu.uci.ics.asterix.optimizer.rules;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
import edu.uci.ics.asterix.metadata.declared.AqlDataSource;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.AbstractCollectionType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.asterix.runtime.pointables.base.DefaultOpenFieldType;
+import edu.uci.ics.asterix.optimizer.rules.typecast.StaticTypeCastUtil;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
@@ -44,9 +32,7 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator;
@@ -58,22 +44,17 @@
* recursive way. It enables: 1. bag-based fields in a record, 2. bidirectional
* cast of a open field and a matched closed field, and 3. put in null fields
* when necessary. It should be fired before the constant folding rule.
- *
* This rule is not responsible for type casting between primitive types.
- *
* Here is an example: A record { "hobby": {{"music", "coding"}}, "id": "001",
* "name": "Person Three"} which confirms to closed type ( id: string, name:
* string, hobby: {{string}}? ) can be cast to an open type (id: string ), or
* vice versa.
- *
* Implementation wise: first, we match the record's type and its target dataset
* type to see if it is "cast-able"; second, if the types are cast-able, we
* embed the required type into the original producer expression. If the types
* are not cast-able, we throw a compile time exception.
- *
* Then, at runtime (not in this rule), the corresponding record/list
* constructors know what to do by checking the required output type.
- *
* TODO: right now record/list constructor of the cast result is not done in the
* ConstantFoldingRule and has to go to the runtime, because the
* ConstantFoldingRule uses ARecordSerializerDeserializer which seems to have
@@ -96,28 +77,31 @@
if (context.checkIfInDontApplySet(this, opRef.getValue()))
return false;
context.addToDontApplySet(this, opRef.getValue());
+
AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
+ List<LogicalVariable> producedVariables = new ArrayList<LogicalVariable>();
+ LogicalVariable oldRecordVariable;
+
if (op1.getOperatorTag() != LogicalOperatorTag.SINK)
return false;
AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
if (op2.getOperatorTag() != LogicalOperatorTag.INSERT_DELETE)
return false;
- AbstractLogicalOperator op3 = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
- if (op3.getOperatorTag() != LogicalOperatorTag.ASSIGN)
+ InsertDeleteOperator insertDeleteOp = (InsertDeleteOperator) op2;
+ if (insertDeleteOp.getOperation() == InsertDeleteOperator.Kind.DELETE)
return false;
-
+ AbstractLogicalOperator assignOp = (AbstractLogicalOperator) op2.getInputs().get(0).getValue();
+ if (assignOp.getOperatorTag() != LogicalOperatorTag.ASSIGN)
+ return false;
/**
* get required record type
*/
InsertDeleteOperator insertDeleteOperator = (InsertDeleteOperator) op2;
- AssignOperator topAssignOperator = (AssignOperator) op3;
AqlDataSource dataSource = (AqlDataSource) insertDeleteOperator.getDataSource();
IAType[] schemaTypes = (IAType[]) dataSource.getSchemaTypes();
- ARecordType requiredRecordType = (ARecordType) schemaTypes[schemaTypes.length - 1];
+ IAType requiredRecordType = schemaTypes[schemaTypes.length - 1];
- /**
- * get input record type to the insert operator
- */
+ AssignOperator topAssignOperator = (AssignOperator) assignOp;
List<LogicalVariable> usedVariables = new ArrayList<LogicalVariable>();
VariableUtilities.getUsedVariables(topAssignOperator, usedVariables);
@@ -126,14 +110,12 @@
// empty
if (usedVariables.size() == 0)
return false;
- LogicalVariable oldRecordVariable = usedVariables.get(0);
+ oldRecordVariable = usedVariables.get(0);
LogicalVariable inputRecordVar = usedVariables.get(0);
IVariableTypeEnvironment env = topAssignOperator.computeOutputTypeEnvironment(context);
- ARecordType inputRecordType = (ARecordType) env.getVarType(inputRecordVar);
+ IAType inputRecordType = (IAType) env.getVarType(inputRecordVar);
- AbstractLogicalOperator currentOperator = topAssignOperator;
- List<LogicalVariable> producedVariables = new ArrayList<LogicalVariable>();
-
+ AbstractLogicalOperator currentOperator = assignOp;
/**
* find the assign operator for the "input record" to the insert_delete
* operator
@@ -153,13 +135,15 @@
List<Mutable<ILogicalExpression>> expressionRefs = originalAssign.getExpressions();
ILogicalExpression expr = expressionRefs.get(position).getValue();
if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
- ScalarFunctionCallExpression funcExpr = (ScalarFunctionCallExpression) expr;
+ AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
// that expression has been rewritten, and it will not
// fail but just return false
- if (TypeComputerUtilities.getRequiredType(funcExpr) != null)
+ if (TypeComputerUtilities.getRequiredType(funcExpr) != null) {
+ context.computeAndSetTypeEnvironmentForOperator(assignOp);
return false;
- IVariableTypeEnvironment assignEnv = topAssignOperator.computeOutputTypeEnvironment(context);
- rewriteFuncExpr(funcExpr, requiredRecordType, inputRecordType, assignEnv);
+ }
+ IVariableTypeEnvironment assignEnv = assignOp.computeOutputTypeEnvironment(context);
+ StaticTypeCastUtil.rewriteFuncExpr(funcExpr, requiredRecordType, inputRecordType, assignEnv);
}
context.computeAndSetTypeEnvironmentForOperator(originalAssign);
}
@@ -172,249 +156,4 @@
return true;
}
- private void rewriteFuncExpr(ScalarFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
- IVariableTypeEnvironment env) throws AlgebricksException {
- if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR) {
- rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType, env);
- } else if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR) {
- rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType, env);
- } else if (reqType.getTypeTag().equals(ATypeTag.RECORD)) {
- rewriteRecordFuncExpr(funcExpr, (ARecordType) reqType, (ARecordType) inputType, env);
- }
- }
-
- /**
- * only called when funcExpr is record constructor
- *
- * @param funcExpr
- * record constructor function expression
- * @param requiredListType
- * required record type
- * @param inputRecordType
- * @param env
- * type environment
- * @throws AlgebricksException
- */
- private void rewriteRecordFuncExpr(ScalarFunctionCallExpression funcExpr, ARecordType requiredRecordType,
- ARecordType inputRecordType, IVariableTypeEnvironment env) throws AlgebricksException {
- // if already rewritten, the required type is not null
- if (TypeComputerUtilities.getRequiredType(funcExpr) != null)
- return;
- TypeComputerUtilities.setRequiredAndInputTypes(funcExpr, requiredRecordType, inputRecordType);
- staticRecordTypeCast(funcExpr, requiredRecordType, inputRecordType, env);
- }
-
- /**
- * only called when funcExpr is list constructor
- *
- * @param funcExpr
- * list constructor function expression
- * @param requiredListType
- * required list type
- * @param inputListType
- * @param env
- * type environment
- * @throws AlgebricksException
- */
- private void rewriteListFuncExpr(ScalarFunctionCallExpression funcExpr, AbstractCollectionType requiredListType,
- AbstractCollectionType inputListType, IVariableTypeEnvironment env) throws AlgebricksException {
- if (TypeComputerUtilities.getRequiredType(funcExpr) != null)
- return;
-
- TypeComputerUtilities.setRequiredAndInputTypes(funcExpr, requiredListType, inputListType);
- List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
-
- IAType itemType = requiredListType.getItemType();
- if (itemType == null || itemType.getTypeTag().equals(ATypeTag.ANY))
- return;
- IAType inputItemType = inputListType.getItemType();
- for (int j = 0; j < args.size(); j++) {
- ILogicalExpression arg = args.get(j).getValue();
- if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
- ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) arg;
- IAType currentItemType = (IAType) env.getType(argFunc);
- if (inputItemType == null || inputItemType == BuiltinType.ANY) {
- currentItemType = (IAType) env.getType(argFunc);
- rewriteFuncExpr(argFunc, itemType, currentItemType, env);
- } else {
- rewriteFuncExpr(argFunc, itemType, inputItemType, env);
- }
- }
- }
- }
-
- private void staticRecordTypeCast(ScalarFunctionCallExpression func, ARecordType reqType, ARecordType inputType,
- IVariableTypeEnvironment env) throws AlgebricksException {
- IAType[] reqFieldTypes = reqType.getFieldTypes();
- String[] reqFieldNames = reqType.getFieldNames();
- IAType[] inputFieldTypes = inputType.getFieldTypes();
- String[] inputFieldNames = inputType.getFieldNames();
-
- int[] fieldPermutation = new int[reqFieldTypes.length];
- boolean[] nullFields = new boolean[reqFieldTypes.length];
- boolean[] openFields = new boolean[inputFieldTypes.length];
-
- Arrays.fill(nullFields, false);
- Arrays.fill(openFields, true);
- Arrays.fill(fieldPermutation, -1);
-
- // forward match: match from actual to required
- boolean matched = false;
- for (int i = 0; i < inputFieldNames.length; i++) {
- String fieldName = inputFieldNames[i];
- IAType fieldType = inputFieldTypes[i];
-
- if (2 * i + 1 > func.getArguments().size())
- throw new AlgebricksException("expression index out of bound");
-
- // 2*i+1 is the index of field value expression
- ILogicalExpression arg = func.getArguments().get(2 * i + 1).getValue();
- matched = false;
- for (int j = 0; j < reqFieldNames.length; j++) {
- String reqFieldName = reqFieldNames[j];
- IAType reqFieldType = reqFieldTypes[j];
- if (fieldName.equals(reqFieldName)) {
- if (fieldType.equals(reqFieldType)) {
- fieldPermutation[j] = i;
- openFields[i] = false;
- matched = true;
-
- if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
- ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
- rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
- }
- break;
- }
-
- // match the optional field
- if (reqFieldType.getTypeTag() == ATypeTag.UNION
- && NonTaggedFormatUtil.isOptionalField((AUnionType) reqFieldType)) {
- IAType itemType = ((AUnionType) reqFieldType).getUnionList().get(
- NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST);
- reqFieldType = itemType;
- if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
- fieldPermutation[j] = i;
- openFields[i] = false;
- matched = true;
-
- // rewrite record expr
- if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
- ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
- rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
- }
- break;
- }
- }
-
- // match the record field: need cast
- if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
- ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
- rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
- fieldPermutation[j] = i;
- openFields[i] = false;
- matched = true;
- break;
- }
- }
- }
- // the input has extra fields
- if (!matched && !reqType.isOpen())
- throw new AlgebricksException("static type mismatch: including an extra closed field " + fieldName);
- }
-
- // backward match: match from required to actual
- for (int i = 0; i < reqFieldNames.length; i++) {
- String reqFieldName = reqFieldNames[i];
- IAType reqFieldType = reqFieldTypes[i];
- matched = false;
- for (int j = 0; j < inputFieldNames.length; j++) {
- String fieldName = inputFieldNames[j];
- IAType fieldType = inputFieldTypes[j];
- if (!fieldName.equals(reqFieldName))
- continue;
- // should check open field here
- // because number of entries in fieldPermuations is the
- // number of required schema fields
- // here we want to check if an input field is matched
- // the entry index of fieldPermuatons is req field index
- if (!openFields[j]) {
- matched = true;
- break;
- }
-
- // match the optional field
- if (reqFieldType.getTypeTag() == ATypeTag.UNION
- && NonTaggedFormatUtil.isOptionalField((AUnionType) reqFieldType)) {
- IAType itemType = ((AUnionType) reqFieldType).getUnionList().get(
- NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST);
- if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
- matched = true;
- break;
- }
- }
- }
- if (matched)
- continue;
-
- if (reqFieldType.getTypeTag() == ATypeTag.UNION
- && NonTaggedFormatUtil.isOptionalField((AUnionType) reqFieldType)) {
- // add a null field
- nullFields[i] = true;
- } else {
- // no matched field in the input for a required closed field
- throw new AlgebricksException("static type mismatch: miss a required closed field " + reqFieldName);
- }
- }
-
- List<Mutable<ILogicalExpression>> arguments = func.getArguments();
- List<Mutable<ILogicalExpression>> originalArguments = new ArrayList<Mutable<ILogicalExpression>>();
- originalArguments.addAll(arguments);
- arguments.clear();
- // re-order the closed part and fill in null fields
- for (int i = 0; i < fieldPermutation.length; i++) {
- int pos = fieldPermutation[i];
- if (pos >= 0) {
- arguments.add(originalArguments.get(2 * pos));
- arguments.add(originalArguments.get(2 * pos + 1));
- }
- if (nullFields[i]) {
- // add a null field
- arguments.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
- new AString(reqFieldNames[i])))));
- arguments.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
- ANull.NULL))));
- }
- }
-
- // add the open part
- for (int i = 0; i < openFields.length; i++) {
- if (openFields[i]) {
- arguments.add(originalArguments.get(2 * i));
- Mutable<ILogicalExpression> fExprRef = originalArguments.get(2 * i + 1);
- ILogicalExpression argExpr = fExprRef.getValue();
-
- // we need to handle open fields recursively by their default
- // types
- // for list, their item type is any
- // for record, their
- if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
- IAType reqFieldType = inputFieldTypes[i];
- if (inputFieldTypes[i].getTypeTag() == ATypeTag.RECORD) {
- reqFieldType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
- }
- if (inputFieldTypes[i].getTypeTag() == ATypeTag.ORDEREDLIST) {
- reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
- }
- if (inputFieldTypes[i].getTypeTag() == ATypeTag.UNORDEREDLIST) {
- reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
- }
- if (TypeComputerUtilities.getRequiredType((AbstractFunctionCallExpression) argExpr) == null) {
- ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) argExpr;
- rewriteFuncExpr(argFunc, reqFieldType, inputFieldTypes[i], env);
- }
- }
- arguments.add(fExprRef);
- }
- }
- }
}
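
The record-cast logic deleted here moves into StaticTypeCastUtil (added below). As a rough illustration of what staticRecordTypeCast does, the following plain-Java sketch (maps and lists instead of the expression tree; it also treats every missing required field as optional, whereas the real code throws for a missing non-optional closed field) reorders the closed part to the required schema, fills missing fields with null, and appends unmatched input fields as the open part.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class StaticRecordCastSketch {
    static Map<String, Object> cast(Map<String, Object> input, List<String> requiredFields) {
        Map<String, Object> result = new LinkedHashMap<>();
        // closed part: required field order, null for fields the input lacks
        for (String field : requiredFields) {
            result.put(field, input.getOrDefault(field, null));
        }
        // open part: extra input fields, appended after the closed part
        for (Map.Entry<String, Object> e : input.entrySet()) {
            if (!requiredFields.contains(e.getKey())) {
                result.put(e.getKey(), e.getValue());
            }
        }
        return result;
    }

    public static void main(String[] args) {
        Map<String, Object> rec = new LinkedHashMap<>();
        rec.put("hobby", List.of("music", "coding"));
        rec.put("id", "001");
        // cast to required schema (id, name): {id=001, name=null, hobby=[music, coding]}
        System.out.println(cast(rec, List.of("id", "name")));
    }
}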
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
index c121ff6..ad70d6f 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
@@ -92,47 +92,55 @@
boolean changed = false;
if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)) {
ARecordType reqType = (ARecordType) TypeComputerUtilities.getRequiredType(expr);
- if (reqType != null) {
- if (reqType.isOpen())
- allClosed = false;
- }
- int n = expr.getArguments().size();
- if (n % 2 > 0) {
- throw new AlgebricksException("Record constructor expected to have an even number of arguments: "
- + expr);
- }
- for (int i = 0; i < n / 2; i++) {
- ILogicalExpression a0 = expr.getArguments().get(2 * i).getValue();
- if (a0.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
- allClosed = false;
+ if (reqType == null || !reqType.isOpen()) {
+ int n = expr.getArguments().size();
+ if (n % 2 > 0) {
+ throw new AlgebricksException(
+ "Record constructor expected to have an even number of arguments: " + expr);
}
- Mutable<ILogicalExpression> aRef1 = expr.getArguments().get(2 * i + 1);
- ILogicalExpression a1 = aRef1.getValue();
- ClosedDataInfo cdi = a1.accept(this, arg);
- if (!cdi.dataIsClosed) {
- allClosed = false;
+ for (int i = 0; i < n / 2; i++) {
+ ILogicalExpression a0 = expr.getArguments().get(2 * i).getValue();
+ if (a0.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
+ allClosed = false;
+ }
+ Mutable<ILogicalExpression> aRef1 = expr.getArguments().get(2 * i + 1);
+ ILogicalExpression a1 = aRef1.getValue();
+ ClosedDataInfo cdi = a1.accept(this, arg);
+ if (!cdi.dataIsClosed) {
+ allClosed = false;
+ }
+ if (cdi.expressionChanged) {
+ aRef1.setValue(cdi.expression);
+ changed = true;
+ }
}
- if (cdi.expressionChanged) {
- aRef1.setValue(cdi.expression);
+ if (allClosed) {
+ expr.setFunctionInfo(FunctionUtils
+ .getFunctionInfo(AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR));
+ GlobalConfig.ASTERIX_LOGGER.finest("Switching to CLOSED record constructor in " + expr + ".\n");
changed = true;
}
}
- if (allClosed) {
- expr.setFunctionInfo(FunctionUtils
- .getFunctionInfo(AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR));
- GlobalConfig.ASTERIX_LOGGER.finest("Switching to CLOSED record constructor in " + expr + ".\n");
- changed = true;
- }
} else {
- for (Mutable<ILogicalExpression> e : expr.getArguments()) {
- ILogicalExpression ale = e.getValue();
- ClosedDataInfo cdi = ale.accept(this, arg);
- if (!cdi.dataIsClosed) {
- allClosed = false;
+ boolean rewrite = true;
+ if (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR)
+ || (expr.getFunctionIdentifier().equals(AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR))) {
+ IAType reqType = TypeComputerUtilities.getRequiredType(expr);
+ if (reqType == null) {
+ rewrite = false;
}
- if (cdi.expressionChanged) {
- e.setValue(cdi.expression);
- changed = true;
+ }
+ if (rewrite) {
+ for (Mutable<ILogicalExpression> e : expr.getArguments()) {
+ ILogicalExpression ale = e.getValue();
+ ClosedDataInfo cdi = ale.accept(this, arg);
+ if (!cdi.dataIsClosed) {
+ allClosed = false;
+ }
+ if (cdi.expressionChanged) {
+ e.setValue(cdi.expression);
+ changed = true;
+ }
}
}
}
@@ -144,7 +152,8 @@
throws AlgebricksException {
Object varType = env.getVarType(expr.getVariableReference());
if (varType == null) {
- throw new AlgebricksException("Could not infer type for variable '" + expr.getVariableReference() + "'.");
+ throw new AlgebricksException("Could not infer type for variable '" + expr.getVariableReference()
+ + "'.");
}
boolean dataIsClosed = isClosedRec((IAType) varType);
return new ClosedDataInfo(false, dataIsClosed, expr);
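
A compact sketch (booleans instead of Asterix types) of the decision structure introduced above: an open-record constructor may be switched to the CLOSED variant only when its required type is absent or closed and all of its fields evaluate as closed, and a list constructor's arguments are only descended into when a required type has been attached to it.

class ClosedConstructorDecisionSketch {
    // requiredTypeIsOpen == null models "no required type attached to the expression"
    static boolean switchRecordToClosed(Boolean requiredTypeIsOpen, boolean allFieldsClosed) {
        boolean requiredAllowsClosed = (requiredTypeIsOpen == null) || !requiredTypeIsOpen;
        return requiredAllowsClosed && allFieldsClosed;
    }

    static boolean visitListConstructorArgs(boolean requiredTypePresent) {
        return requiredTypePresent; // otherwise the list constructor is left untouched
    }

    public static void main(String[] args) {
        System.out.println(switchRecordToClosed(null, true));  // true
        System.out.println(switchRecordToClosed(true, true));  // false: open required type
        System.out.println(visitListConstructorArgs(false));   // false
    }
}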
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
new file mode 100644
index 0000000..3ba3e96
--- /dev/null
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/typecast/StaticTypeCastUtil.java
@@ -0,0 +1,430 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.optimizer.rules.typecast;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.commons.lang3.mutable.Mutable;
+import org.apache.commons.lang3.mutable.MutableObject;
+
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.AsterixFunctionInfo;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
+import edu.uci.ics.asterix.om.typecomputer.base.TypeComputerUtilities;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.AbstractCollectionType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
+
+/**
+ * This class is a utility for static type casting.
+ * It offers two public methods:
+ * 1. public static boolean rewriteListExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+ * IVariableTypeEnvironment env) throws AlgebricksException, which only enforces the list type recursively.
+ * 2. public static boolean rewriteFuncExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+ * IVariableTypeEnvironment env) throws AlgebricksException, which enforces the list type and the record type recursively.
+ *
+ * @author yingyib
+ */
+public class StaticTypeCastUtil {
+
+ /**
+ * This method is only called when funcExpr contains list constructor function calls.
+ * The list constructor is special because a nested list is inferred as type List<ANY>.
+ * The bottom-up type inference (InferTypesRule in Algebricks) does not infer the enforced type, so this method enforces it.
+ * To preserve the generality of Algebricks, this method is invoked from an ASTERIX rule, {@link edu.uci.ics.asterix.optimizer.rules.IntroduceEnforcedListTypeRule}.
+ *
+ * @param funcExpr
+ * the function call expression to process
+ * @param reqType
+ * the required type
+ * @param inputType
+ * @param env
+ * type environment
+ * @throws AlgebricksException
+ */
+ public static boolean rewriteListExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+ IVariableTypeEnvironment env) throws AlgebricksException {
+ if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR) {
+ if (reqType.equals(BuiltinType.ANY)) {
+ reqType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+ }
+ return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+ env);
+ } else if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR) {
+ if (reqType.equals(BuiltinType.ANY)) {
+ reqType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+ }
+ return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+ env);
+ } else {
+ List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+ boolean changed = false;
+ for (Mutable<ILogicalExpression> arg : args) {
+ ILogicalExpression argExpr = arg.getValue();
+ if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) argExpr;
+ IAType exprType = (IAType) env.getType(argFuncExpr);
+ changed = changed || rewriteListExpr(argFuncExpr, exprType, exprType, env);
+ }
+ }
+ return changed;
+ }
+ }
+
+ /**
+ * This method is to recursively enforce required types, for the list type and the record type.
+ * The List constructor is very special because
+ * 1. a nested list in a list is of type List<ANY>;
+ * 2. a nested record in a list is of type Open_Record{}.
+ * The open record constructor is very special because
+ * 1. a nested list in the open part is of type List<ANY>;
+ * 2. a nested record in the open part is of type Open_Record{}.
+ * The bottom-up type inference (InferTypesRule in Algebricks) does not infer these enforced types, so this method enforces them.
+ * To preserve the generality of Algebricks, this method is invoked from an ASTERIX rule, {@link edu.uci.ics.asterix.optimizer.rules.IntroduceStaticTypeCastRule}.
+ *
+ * @param funcExpr
+ * the function expression whose type needs to be top-down enforced
+ * @param reqType
+ * the required type inferred from parent operators/expressions
+ * @param inputType
+ * the currently inferred input type
+ * @param env
+ * the type environment
+ * @return true if the type is cast; otherwise, false.
+ * @throws AlgebricksException
+ */
+ public static boolean rewriteFuncExpr(AbstractFunctionCallExpression funcExpr, IAType reqType, IAType inputType,
+ IVariableTypeEnvironment env) throws AlgebricksException {
+ if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR) {
+ if (reqType.equals(BuiltinType.ANY)) {
+ reqType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+ }
+ return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+ env);
+ } else if (funcExpr.getFunctionIdentifier() == AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR) {
+ if (reqType.equals(BuiltinType.ANY)) {
+ reqType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+ }
+ return rewriteListFuncExpr(funcExpr, (AbstractCollectionType) reqType, (AbstractCollectionType) inputType,
+ env);
+ } else if (inputType.getTypeTag().equals(ATypeTag.RECORD)) {
+ if (reqType.equals(BuiltinType.ANY)) {
+ reqType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
+ }
+ return rewriteRecordFuncExpr(funcExpr, (ARecordType) reqType, (ARecordType) inputType, env);
+ } else {
+ List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+ boolean changed = false;
+ for (Mutable<ILogicalExpression> arg : args) {
+ ILogicalExpression argExpr = arg.getValue();
+ if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ AbstractFunctionCallExpression argFuncExpr = (AbstractFunctionCallExpression) argExpr;
+ IAType exprType = (IAType) env.getType(argFuncExpr);
+ changed = rewriteFuncExpr(argFuncExpr, exprType, exprType, env) || changed;
+ }
+ }
+ return changed;
+ }
+ }
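+
+ // Illustrative sketch (hypothetical helper, not invoked anywhere): both methods
+ // above substitute a concrete default open type whenever the required type is ANY,
+ // so that nested items stay self-describing at runtime. The helper below restates
+ // that substitution in isolation.
+ private static IAType defaultOpenTypeFor(IAType reqType, ATypeTag inputTag) {
+ if (!reqType.equals(BuiltinType.ANY)) {
+ return reqType;
+ }
+ switch (inputTag) {
+ case RECORD:
+ return DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
+ case ORDEREDLIST:
+ return DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+ case UNORDEREDLIST:
+ return DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+ default:
+ return reqType;
+ }
+ }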
+
+ /**
+ * Only called when funcExpr is a record constructor.
+ *
+ * @param funcExpr
+ * record constructor function expression
+ * @param requiredRecordType
+ * the required record type
+ * @param inputRecordType
+ * the input record type
+ * @param env
+ * type environment
+ * @return true if the expression is rewritten; false if it was already rewritten.
+ * @throws AlgebricksException
+ */
+ private static boolean rewriteRecordFuncExpr(AbstractFunctionCallExpression funcExpr,
+ ARecordType requiredRecordType, ARecordType inputRecordType, IVariableTypeEnvironment env)
+ throws AlgebricksException {
+ // if already rewritten, the required type is not null
+ if (TypeComputerUtilities.getRequiredType(funcExpr) != null)
+ return false;
+ TypeComputerUtilities.setRequiredAndInputTypes(funcExpr, requiredRecordType, inputRecordType);
+ staticRecordTypeCast(funcExpr, requiredRecordType, inputRecordType, env);
+ return true;
+ }
+
+ /**
+ * Only called when funcExpr is a list constructor.
+ *
+ * @param funcExpr
+ * list constructor function expression
+ * @param requiredListType
+ * the required list type
+ * @param inputListType
+ * the input list type
+ * @param env
+ * type environment
+ * @return true if the expression is rewritten; false if it was already rewritten.
+ * @throws AlgebricksException
+ */
+ private static boolean rewriteListFuncExpr(AbstractFunctionCallExpression funcExpr,
+ AbstractCollectionType requiredListType, AbstractCollectionType inputListType, IVariableTypeEnvironment env)
+ throws AlgebricksException {
+ if (TypeComputerUtilities.getRequiredType(funcExpr) != null)
+ return false;
+
+ TypeComputerUtilities.setRequiredAndInputTypes(funcExpr, requiredListType, inputListType);
+ List<Mutable<ILogicalExpression>> args = funcExpr.getArguments();
+
+ IAType itemType = requiredListType.getItemType();
+ IAType inputItemType = inputListType.getItemType();
+ for (int j = 0; j < args.size(); j++) {
+ ILogicalExpression arg = args.get(j).getValue();
+ if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) arg;
+ IAType currentItemType = (IAType) env.getType(argFunc);
+ if (inputItemType == null || inputItemType == BuiltinType.ANY) {
+ currentItemType = (IAType) env.getType(argFunc);
+ rewriteFuncExpr(argFunc, itemType, currentItemType, env);
+ } else {
+ rewriteFuncExpr(argFunc, itemType, inputItemType, env);
+ }
+ }
+ }
+ return true;
+ }
+
+ /**
+ * This method statically casts a record constructor expression from its current (input) type to the required type.
+ *
+ * @param func
+ * The record constructor expression.
+ * @param reqType
+ * The required type.
+ * @param inputType
+ * The current type.
+ * @param env
+ * The type environment.
+ * @throws AlgebricksException
+ */
+ private static void staticRecordTypeCast(AbstractFunctionCallExpression func, ARecordType reqType,
+ ARecordType inputType, IVariableTypeEnvironment env) throws AlgebricksException {
+ IAType[] reqFieldTypes = reqType.getFieldTypes();
+ String[] reqFieldNames = reqType.getFieldNames();
+ IAType[] inputFieldTypes = inputType.getFieldTypes();
+ String[] inputFieldNames = inputType.getFieldNames();
+
+ int[] fieldPermutation = new int[reqFieldTypes.length];
+ boolean[] nullFields = new boolean[reqFieldTypes.length];
+ boolean[] openFields = new boolean[inputFieldTypes.length];
+
+ Arrays.fill(nullFields, false);
+ Arrays.fill(openFields, true);
+ Arrays.fill(fieldPermutation, -1);
+
+ // forward match: match from actual to required
+ boolean matched = false;
+ for (int i = 0; i < inputFieldNames.length; i++) {
+ String fieldName = inputFieldNames[i];
+ IAType fieldType = inputFieldTypes[i];
+
+ if (2 * i + 1 >= func.getArguments().size())
+ throw new AlgebricksException("expression index out of bounds");
+
+ // 2*i+1 is the index of field value expression
+ ILogicalExpression arg = func.getArguments().get(2 * i + 1).getValue();
+ matched = false;
+ for (int j = 0; j < reqFieldNames.length; j++) {
+ String reqFieldName = reqFieldNames[j];
+ IAType reqFieldType = reqFieldTypes[j];
+ if (fieldName.equals(reqFieldName)) {
+ //type matched
+ if (fieldType.equals(reqFieldType)) {
+ fieldPermutation[j] = i;
+ openFields[i] = false;
+ matched = true;
+
+ if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
+ rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
+ }
+ break;
+ }
+
+ // match the optional field
+ if (reqFieldType.getTypeTag() == ATypeTag.UNION
+ && NonTaggedFormatUtil.isOptionalField((AUnionType) reqFieldType)) {
+ IAType itemType = ((AUnionType) reqFieldType).getUnionList().get(
+ NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST);
+ reqFieldType = itemType;
+ if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
+ fieldPermutation[j] = i;
+ openFields[i] = false;
+ matched = true;
+
+ // rewrite record expr
+ if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
+ rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
+ }
+ break;
+ }
+ }
+
+ // match the optional type input for a non-optional field
+ // delay that to runtime by calling the not-null function
+ if (fieldType.getTypeTag() == ATypeTag.UNION
+ && NonTaggedFormatUtil.isOptionalField((AUnionType) fieldType)) {
+ IAType itemType = ((AUnionType) fieldType).getUnionList().get(
+ NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST);
+ if (reqFieldType.equals(itemType)) {
+ fieldPermutation[j] = i;
+ openFields[i] = false;
+ matched = true;
+
+ ScalarFunctionCallExpression notNullFunc = new ScalarFunctionCallExpression(
+ new AsterixFunctionInfo(AsterixBuiltinFunctions.NOT_NULL));
+ notNullFunc.getArguments().add(new MutableObject<ILogicalExpression>(arg));
+ //wrap the not null function to the original function
+ func.getArguments().get(2 * i + 1).setValue(notNullFunc);
+ break;
+ }
+ }
+
+ // match the record field: need cast
+ if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
+ rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
+ fieldPermutation[j] = i;
+ openFields[i] = false;
+ matched = true;
+ break;
+ }
+ }
+ }
+ // the input has extra fields
+ if (!matched && !reqType.isOpen())
+ throw new AlgebricksException("static type mismatch: including an extra closed field " + fieldName);
+ }
+
+ // backward match: match from required to actual
+ for (int i = 0; i < reqFieldNames.length; i++) {
+ String reqFieldName = reqFieldNames[i];
+ IAType reqFieldType = reqFieldTypes[i];
+ matched = false;
+ for (int j = 0; j < inputFieldNames.length; j++) {
+ String fieldName = inputFieldNames[j];
+ IAType fieldType = inputFieldTypes[j];
+ if (!fieldName.equals(reqFieldName))
+ continue;
+ // Check the open-field flag rather than fieldPermutation here:
+ // fieldPermutation has one entry per required-schema field (indexed by
+ // the required field's position), whereas openFields records, per input
+ // field, whether it was already matched in the forward pass.
+ if (!openFields[j]) {
+ matched = true;
+ break;
+ }
+
+ // match the optional field
+ if (reqFieldType.getTypeTag() == ATypeTag.UNION
+ && NonTaggedFormatUtil.isOptionalField((AUnionType) reqFieldType)) {
+ IAType itemType = ((AUnionType) reqFieldType).getUnionList().get(
+ NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST);
+ if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
+ matched = true;
+ break;
+ }
+ }
+ }
+ if (matched)
+ continue;
+
+ if (reqFieldType.getTypeTag() == ATypeTag.UNION
+ && NonTaggedFormatUtil.isOptionalField((AUnionType) reqFieldType)) {
+ // add a null field
+ nullFields[i] = true;
+ } else {
+ // no matched field in the input for a required closed field
+ throw new AlgebricksException("static type mismatch: miss a required closed field " + reqFieldName);
+ }
+ }
+
+ List<Mutable<ILogicalExpression>> arguments = func.getArguments();
+ List<Mutable<ILogicalExpression>> originalArguments = new ArrayList<Mutable<ILogicalExpression>>();
+ originalArguments.addAll(arguments);
+ arguments.clear();
+ // re-order the closed part and fill in null fields
+ for (int i = 0; i < fieldPermutation.length; i++) {
+ int pos = fieldPermutation[i];
+ if (pos >= 0) {
+ arguments.add(originalArguments.get(2 * pos));
+ arguments.add(originalArguments.get(2 * pos + 1));
+ }
+ if (nullFields[i]) {
+ // add a null field
+ arguments.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+ new AString(reqFieldNames[i])))));
+ arguments.add(new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
+ ANull.NULL))));
+ }
+ }
+
+ // add the open part
+ for (int i = 0; i < openFields.length; i++) {
+ if (openFields[i]) {
+ arguments.add(originalArguments.get(2 * i));
+ Mutable<ILogicalExpression> fExprRef = originalArguments.get(2 * i + 1);
+ ILogicalExpression argExpr = fExprRef.getValue();
+
+ // Handle open fields recursively using their default open types:
+ // for a list, the item type is ANY; for a record, the default is
+ // the nested open record type.
+ if (argExpr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
+ IAType reqFieldType = inputFieldTypes[i];
+ if (inputFieldTypes[i].getTypeTag() == ATypeTag.RECORD) {
+ reqFieldType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
+ }
+ if (inputFieldTypes[i].getTypeTag() == ATypeTag.ORDEREDLIST) {
+ reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE;
+ }
+ if (inputFieldTypes[i].getTypeTag() == ATypeTag.UNORDEREDLIST) {
+ reqFieldType = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+ }
+ if (TypeComputerUtilities.getRequiredType((AbstractFunctionCallExpression) argExpr) == null) {
+ ScalarFunctionCallExpression argFunc = (ScalarFunctionCallExpression) argExpr;
+ rewriteFuncExpr(argFunc, reqFieldType, inputFieldTypes[i], env);
+ }
+ }
+ arguments.add(fExprRef);
+ }
+ }
+ }
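+
+ // Illustrative sketch (hypothetical helper on plain strings, not invoked anywhere):
+ // staticRecordTypeCast reorders the (field-name, field-value) argument pairs so that
+ // closed fields follow the required schema order, pads missing optional fields with
+ // null, and appends the open fields; this toy version shows only that reordering.
+ private static List<String> reorderArgumentsSketch(List<String> originalArgs, int[] fieldPermutation,
+ boolean[] nullFields, boolean[] openFields, String[] reqFieldNames) {
+ List<String> reordered = new ArrayList<String>();
+ for (int i = 0; i < fieldPermutation.length; i++) {
+ int pos = fieldPermutation[i];
+ if (pos >= 0) {
+ reordered.add(originalArgs.get(2 * pos)); // field name
+ reordered.add(originalArgs.get(2 * pos + 1)); // field value
+ } else if (nullFields[i]) {
+ reordered.add(reqFieldNames[i]); // missing optional field name
+ reordered.add("null"); // padded null value
+ }
+ }
+ for (int i = 0; i < openFields.length; i++) {
+ if (openFields[i]) {
+ reordered.add(originalArgs.get(2 * i)); // open field name
+ reordered.add(originalArgs.get(2 * i + 1)); // open field value
+ }
+ }
+ return reordered;
+ }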
+
+}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
index a908363..9163e35 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/AbstractAqlTranslator.java
@@ -1,78 +1,103 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.translator;
import java.util.Map;
import edu.uci.ics.asterix.aql.base.Statement;
import edu.uci.ics.asterix.aql.expression.DataverseDropStatement;
+import edu.uci.ics.asterix.aql.expression.DeleteStatement;
import edu.uci.ics.asterix.aql.expression.DropStatement;
-import edu.uci.ics.asterix.aql.expression.FunctionDropStatement;
+import edu.uci.ics.asterix.aql.expression.InsertStatement;
import edu.uci.ics.asterix.aql.expression.NodeGroupDropStatement;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinArtifactMap;
-import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinArtifactMap.ARTIFACT_KIND;
import edu.uci.ics.asterix.metadata.entities.AsterixBuiltinTypeMap;
import edu.uci.ics.asterix.metadata.entities.Dataverse;
import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+/**
+ * Base class for AQL translators.
+ * Contains the common validation logic for AQL statements.
+ */
public abstract class AbstractAqlTranslator {
protected static final Map<String, BuiltinType> builtinTypeMap = AsterixBuiltinTypeMap.getBuiltinTypes();
- public void validateOperation(Dataverse dataverse, Statement stmt) throws AsterixException {
- String dataverseName = dataverse != null ? dataverse.getDataverseName() : null;
- if (dataverseName != null && dataverseName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
+ public void validateOperation(Dataverse defaultDataverse, Statement stmt) throws AsterixException {
+ boolean invalidOperation = false;
+ String message = null;
+ String dataverse = defaultDataverse != null ? defaultDataverse.getDataverseName() : null;
+ switch (stmt.getKind()) {
+ case INSERT:
+ InsertStatement insertStmt = (InsertStatement) stmt;
+ if (insertStmt.getDataverseName() != null) {
+ dataverse = insertStmt.getDataverseName().getValue();
+ }
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+ if (invalidOperation) {
+ message = "Insert operation is not permitted in dataverse "
+ + MetadataConstants.METADATA_DATAVERSE_NAME;
+ }
+ break;
- boolean invalidOperation = false;
- String message = null;
- switch (stmt.getKind()) {
- case INSERT:
- case UPDATE:
- case DELETE:
- invalidOperation = true;
- message = " Operation " + stmt.getKind() + " not permitted in system dataverse " + "'"
- + MetadataConstants.METADATA_DATAVERSE_NAME + "'";
- break;
- case FUNCTION_DROP:
- FunctionSignature signature = ((FunctionDropStatement) stmt).getFunctionSignature();
- FunctionIdentifier fId = new FunctionIdentifier(signature.getNamespace(), signature.getName(),
- signature.getArity());
- if (dataverseName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)
- && AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.FUNCTION, fId)) {
- invalidOperation = true;
- message = "Cannot drop function " + signature + " (protected by system)";
- }
- break;
- case NODEGROUP_DROP:
- NodeGroupDropStatement nodeGroupDropStmt = (NodeGroupDropStatement) stmt;
- String nodegroupName = nodeGroupDropStmt.getNodeGroupName().getValue();
- if (AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.NODEGROUP, nodegroupName)) {
- message = "Cannot drop nodegroup " + nodegroupName + " (protected by system)";
- invalidOperation = true;
- }
- break;
- case DATAVERSE_DROP:
- DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
- String dvName = dvDropStmt.getDataverseName().getValue();
- if (dvName.equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
- message = "Cannot drop dataverse " + dvName + " (protected by system)";
- invalidOperation = true;
- }
- break;
- case DATASET_DROP:
- DropStatement dropStmt = (DropStatement) stmt;
- String datasetName = dropStmt.getDatasetName().getValue();
- if (AsterixBuiltinArtifactMap.isSystemProtectedArtifact(ARTIFACT_KIND.DATASET, datasetName)) {
- invalidOperation = true;
- message = "Cannot drop dataset " + datasetName + " (protected by system)";
- }
- break;
- }
- if (invalidOperation) {
- throw new AsterixException(message);
- }
+ case DELETE:
+ DeleteStatement deleteStmt = (DeleteStatement) stmt;
+ if (deleteStmt.getDataverseName() != null) {
+ dataverse = deleteStmt.getDataverseName().getValue();
+ }
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+ if (invalidOperation) {
+ message = "Delete operation is not permitted in dataverse "
+ + MetadataConstants.METADATA_DATAVERSE_NAME;
+ }
+ break;
+
+ case NODEGROUP_DROP:
+ String nodegroupName = ((NodeGroupDropStatement) stmt).getNodeGroupName().getValue();
+ invalidOperation = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME.equals(nodegroupName);
+ if (invalidOperation) {
+ message = "Cannot drop nodegroup:" + nodegroupName;
+ }
+ break;
+
+ case DATAVERSE_DROP:
+ DataverseDropStatement dvDropStmt = (DataverseDropStatement) stmt;
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dvDropStmt.getDataverseName()
+ .getValue());
+ if (invalidOperation) {
+ message = "Cannot drop dataverse:" + dvDropStmt.getDataverseName().getValue();
+ }
+ break;
+
+ case DATASET_DROP:
+ DropStatement dropStmt = (DropStatement) stmt;
+ if (dropStmt.getDataverseName() != null) {
+ dataverse = dropStmt.getDataverseName().getValue();
+ }
+ invalidOperation = MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+ if (invalidOperation) {
+ message = "Cannot drop a dataset belonging to the dataverse:"
+ + MetadataConstants.METADATA_DATAVERSE_NAME;
+ }
+ break;
+
+ }
+
+ if (invalidOperation) {
+ throw new AsterixException("Invalid operation - " + message);
}
}
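+
+ // Illustrative sketch (hypothetical helper, not invoked anywhere): the INSERT,
+ // DELETE, and DATASET_DROP cases above reduce to the same guard, i.e. resolve the
+ // target dataverse (falling back to the default one) and reject the operation if
+ // it targets the Metadata dataverse.
+ protected static boolean targetsMetadataDataverse(String stmtDataverse, Dataverse defaultDataverse) {
+ String dataverse = stmtDataverse != null ? stmtDataverse
+ : (defaultDataverse != null ? defaultDataverse.getDataverseName() : null);
+ return MetadataConstants.METADATA_DATAVERSE_NAME.equals(dataverse);
+ }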
-}
\ No newline at end of file
+}
diff --git a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
index 6d52b8f..625fed1 100644
--- a/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
+++ b/asterix-algebra/src/main/java/edu/uci/ics/asterix/translator/CompiledStatements.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -43,501 +43,533 @@
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+/**
+ * An AQL statement instance is translated into an instance of a corresponding
+ * CompiledStatement subtype that carries additional fields for use by the AqlTranslator.
+ */
public class CompiledStatements {
- public static interface ICompiledStatement {
+ public static interface ICompiledStatement {
- public Kind getKind();
- }
+ public Kind getKind();
+ }
- public static class CompiledWriteFromQueryResultStatement implements ICompiledDmlStatement {
+ public static class CompiledWriteFromQueryResultStatement implements
+ ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private Query query;
- private int varCounter;
+ private String dataverseName;
+ private String datasetName;
+ private Query query;
+ private int varCounter;
- public CompiledWriteFromQueryResultStatement(String dataverseName, String datasetName, Query query,
- int varCounter) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.query = query;
- this.varCounter = varCounter;
- }
+ public CompiledWriteFromQueryResultStatement(String dataverseName,
+ String datasetName, Query query, int varCounter) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.query = query;
+ this.varCounter = varCounter;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Query getQuery() {
- return query;
- }
+ public Query getQuery() {
+ return query;
+ }
- @Override
- public Kind getKind() {
- return Kind.WRITE_FROM_QUERY_RESULT;
- }
+ @Override
+ public Kind getKind() {
+ return Kind.WRITE_FROM_QUERY_RESULT;
+ }
- }
+ }
- public static class CompiledDatasetDropStatement implements ICompiledStatement {
- private final String dataverseName;
- private final String datasetName;
+ public static class CompiledDatasetDropStatement implements
+ ICompiledStatement {
+ private final String dataverseName;
+ private final String datasetName;
- public CompiledDatasetDropStatement(String dataverseName, String datasetName) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- }
+ public CompiledDatasetDropStatement(String dataverseName,
+ String datasetName) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- @Override
- public Kind getKind() {
- return Kind.DATASET_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.DATASET_DROP;
+ }
+ }
- // added by yasser
- public static class CompiledCreateDataverseStatement implements ICompiledStatement {
- private String dataverseName;
- private String format;
+ // added by yasser
+ public static class CompiledCreateDataverseStatement implements
+ ICompiledStatement {
+ private String dataverseName;
+ private String format;
- public CompiledCreateDataverseStatement(String dataverseName, String format) {
- this.dataverseName = dataverseName;
- this.format = format;
- }
+ public CompiledCreateDataverseStatement(String dataverseName,
+ String format) {
+ this.dataverseName = dataverseName;
+ this.format = format;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getFormat() {
- return format;
- }
+ public String getFormat() {
+ return format;
+ }
- @Override
- public Kind getKind() {
- return Kind.CREATE_DATAVERSE;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.CREATE_DATAVERSE;
+ }
+ }
- public static class CompiledNodeGroupDropStatement implements ICompiledStatement {
- private String nodeGroupName;
+ public static class CompiledNodeGroupDropStatement implements
+ ICompiledStatement {
+ private String nodeGroupName;
- public CompiledNodeGroupDropStatement(String nodeGroupName) {
- this.nodeGroupName = nodeGroupName;
- }
+ public CompiledNodeGroupDropStatement(String nodeGroupName) {
+ this.nodeGroupName = nodeGroupName;
+ }
- public String getNodeGroupName() {
- return nodeGroupName;
- }
+ public String getNodeGroupName() {
+ return nodeGroupName;
+ }
- @Override
- public Kind getKind() {
- return Kind.NODEGROUP_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.NODEGROUP_DROP;
+ }
+ }
- public static class CompiledIndexDropStatement implements ICompiledStatement {
- private String dataverseName;
- private String datasetName;
- private String indexName;
+ public static class CompiledIndexDropStatement implements
+ ICompiledStatement {
+ private String dataverseName;
+ private String datasetName;
+ private String indexName;
- public CompiledIndexDropStatement(String dataverseName, String datasetName, String indexName) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.indexName = indexName;
- }
+ public CompiledIndexDropStatement(String dataverseName,
+ String datasetName, String indexName) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.indexName = indexName;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public String getIndexName() {
- return indexName;
- }
+ public String getIndexName() {
+ return indexName;
+ }
- @Override
- public Kind getKind() {
- return Kind.INDEX_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.INDEX_DROP;
+ }
+ }
- public static class CompiledDataverseDropStatement implements ICompiledStatement {
- private String dataverseName;
- private boolean ifExists;
+ public static class CompiledDataverseDropStatement implements
+ ICompiledStatement {
+ private String dataverseName;
+ private boolean ifExists;
- public CompiledDataverseDropStatement(String dataverseName, boolean ifExists) {
- this.dataverseName = dataverseName;
- this.ifExists = ifExists;
- }
+ public CompiledDataverseDropStatement(String dataverseName,
+ boolean ifExists) {
+ this.dataverseName = dataverseName;
+ this.ifExists = ifExists;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public boolean getIfExists() {
- return ifExists;
- }
+ public boolean getIfExists() {
+ return ifExists;
+ }
- @Override
- public Kind getKind() {
- return Kind.DATAVERSE_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.DATAVERSE_DROP;
+ }
+ }
- public static class CompiledTypeDropStatement implements ICompiledStatement {
- private String typeName;
+ public static class CompiledTypeDropStatement implements ICompiledStatement {
+ private String typeName;
- public CompiledTypeDropStatement(String nodeGroupName) {
- this.typeName = nodeGroupName;
- }
+ public CompiledTypeDropStatement(String nodeGroupName) {
+ this.typeName = nodeGroupName;
+ }
- public String getTypeName() {
- return typeName;
- }
+ public String getTypeName() {
+ return typeName;
+ }
- @Override
- public Kind getKind() {
- return Kind.TYPE_DROP;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.TYPE_DROP;
+ }
+ }
- public static interface ICompiledDmlStatement extends ICompiledStatement {
+ public static interface ICompiledDmlStatement extends ICompiledStatement {
- public String getDataverseName();
+ public String getDataverseName();
- public String getDatasetName();
- }
+ public String getDatasetName();
+ }
- public static class CompiledCreateIndexStatement implements ICompiledDmlStatement {
- private final String indexName;
- private final String dataverseName;
- private final String datasetName;
- private final List<String> keyFields;
- private final IndexType indexType;
+ public static class CompiledCreateIndexStatement implements
+ ICompiledDmlStatement {
+ private final String indexName;
+ private final String dataverseName;
+ private final String datasetName;
+ private final List<String> keyFields;
+ private final IndexType indexType;
- // Specific to NGram index.
- private final int gramLength;
+ // Specific to NGram index.
+ private final int gramLength;
- public CompiledCreateIndexStatement(String indexName, String dataverseName, String datasetName,
- List<String> keyFields, int gramLength, IndexType indexType) {
- this.indexName = indexName;
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.keyFields = keyFields;
- this.gramLength = gramLength;
- this.indexType = indexType;
- }
+ public CompiledCreateIndexStatement(String indexName,
+ String dataverseName, String datasetName,
+ List<String> keyFields, int gramLength, IndexType indexType) {
+ this.indexName = indexName;
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.keyFields = keyFields;
+ this.gramLength = gramLength;
+ this.indexType = indexType;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getIndexName() {
- return indexName;
- }
+ public String getIndexName() {
+ return indexName;
+ }
- public List<String> getKeyFields() {
- return keyFields;
- }
+ public List<String> getKeyFields() {
+ return keyFields;
+ }
- public IndexType getIndexType() {
- return indexType;
- }
+ public IndexType getIndexType() {
+ return indexType;
+ }
- public int getGramLength() {
- return gramLength;
- }
+ public int getGramLength() {
+ return gramLength;
+ }
- @Override
- public Kind getKind() {
- return Kind.CREATE_INDEX;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.CREATE_INDEX;
+ }
+ }
- public static class CompiledLoadFromFileStatement implements ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private boolean alreadySorted;
- private String adapter;
- private Map<String, String> properties;
+ public static class CompiledLoadFromFileStatement implements
+ ICompiledDmlStatement {
+ private String dataverseName;
+ private String datasetName;
+ private boolean alreadySorted;
+ private String adapter;
+ private Map<String, String> properties;
- public CompiledLoadFromFileStatement(String dataverseName, String datasetName, String adapter,
- Map<String, String> properties, boolean alreadySorted) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.alreadySorted = alreadySorted;
- this.adapter = adapter;
- this.properties = properties;
- }
+ public CompiledLoadFromFileStatement(String dataverseName,
+ String datasetName, String adapter,
+ Map<String, String> properties, boolean alreadySorted) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.alreadySorted = alreadySorted;
+ this.adapter = adapter;
+ this.properties = properties;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public boolean alreadySorted() {
- return alreadySorted;
- }
+ public boolean alreadySorted() {
+ return alreadySorted;
+ }
- public String getAdapter() {
- return adapter;
- }
+ public String getAdapter() {
+ return adapter;
+ }
- public Map<String, String> getProperties() {
- return properties;
- }
+ public Map<String, String> getProperties() {
+ return properties;
+ }
- @Override
- public Kind getKind() {
- return Kind.LOAD_FROM_FILE;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.LOAD_FROM_FILE;
+ }
+ }
- public static class CompiledInsertStatement implements ICompiledDmlStatement {
- private final String dataverseName;
- private final String datasetName;
- private final Query query;
- private final int varCounter;
+ public static class CompiledInsertStatement implements
+ ICompiledDmlStatement {
+ private final String dataverseName;
+ private final String datasetName;
+ private final Query query;
+ private final int varCounter;
- public CompiledInsertStatement(String dataverseName, String datasetName, Query query, int varCounter) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.query = query;
- this.varCounter = varCounter;
- }
+ public CompiledInsertStatement(String dataverseName,
+ String datasetName, Query query, int varCounter) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.query = query;
+ this.varCounter = varCounter;
+ }
- public String getDataverseName() {
- return dataverseName;
- }
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public String getDatasetName() {
- return datasetName;
- }
+ public String getDatasetName() {
+ return datasetName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Query getQuery() {
- return query;
- }
+ public Query getQuery() {
+ return query;
+ }
- @Override
- public Kind getKind() {
- return Kind.INSERT;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.INSERT;
+ }
+ }
- public static class CompiledBeginFeedStatement implements ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private Query query;
- private int varCounter;
+ public static class CompiledBeginFeedStatement implements
+ ICompiledDmlStatement {
+ private String dataverseName;
+ private String datasetName;
+ private Query query;
+ private int varCounter;
- public CompiledBeginFeedStatement(String dataverseName, String datasetName, Query query, int varCounter) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.query = query;
- this.varCounter = varCounter;
- }
+ public CompiledBeginFeedStatement(String dataverseName,
+ String datasetName, Query query, int varCounter) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.query = query;
+ this.varCounter = varCounter;
+ }
- @Override
- public String getDataverseName() {
- return dataverseName;
- }
+ @Override
+ public String getDataverseName() {
+ return dataverseName;
+ }
- @Override
- public String getDatasetName() {
- return datasetName;
- }
+ @Override
+ public String getDatasetName() {
+ return datasetName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Query getQuery() {
- return query;
- }
+ public Query getQuery() {
+ return query;
+ }
- public void setQuery(Query query) {
- this.query = query;
- }
+ public void setQuery(Query query) {
+ this.query = query;
+ }
- @Override
- public Kind getKind() {
- return Kind.BEGIN_FEED;
- }
- }
+ @Override
+ public Kind getKind() {
+ return Kind.BEGIN_FEED;
+ }
+ }
- public static class CompiledControlFeedStatement implements ICompiledDmlStatement {
- private String dataverseName;
- private String datasetName;
- private OperationType operationType;
- private Query query;
- private int varCounter;
- private Map<String, String> alteredParams;
+ public static class CompiledControlFeedStatement implements
+ ICompiledDmlStatement {
+ private String dataverseName;
+ private String datasetName;
+ private OperationType operationType;
+ private Query query;
+ private int varCounter;
+ private Map<String, String> alteredParams;
- public CompiledControlFeedStatement(OperationType operationType, String dataverseName, String datasetName,
- Map<String, String> alteredParams) {
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.operationType = operationType;
- this.alteredParams = alteredParams;
- }
+ public CompiledControlFeedStatement(OperationType operationType,
+ String dataverseName, String datasetName,
+ Map<String, String> alteredParams) {
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.operationType = operationType;
+ this.alteredParams = alteredParams;
+ }
- @Override
- public String getDataverseName() {
- return dataverseName;
- }
+ @Override
+ public String getDataverseName() {
+ return dataverseName;
+ }
- @Override
- public String getDatasetName() {
- return datasetName;
- }
+ @Override
+ public String getDatasetName() {
+ return datasetName;
+ }
- public OperationType getOperationType() {
- return operationType;
- }
+ public OperationType getOperationType() {
+ return operationType;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Query getQuery() {
- return query;
- }
+ public Query getQuery() {
+ return query;
+ }
- @Override
- public Kind getKind() {
- return Kind.CONTROL_FEED;
- }
+ @Override
+ public Kind getKind() {
+ return Kind.CONTROL_FEED;
+ }
- public Map<String, String> getProperties() {
- return alteredParams;
- }
+ public Map<String, String> getProperties() {
+ return alteredParams;
+ }
- public void setProperties(Map<String, String> properties) {
- this.alteredParams = properties;
- }
- }
+ public void setProperties(Map<String, String> properties) {
+ this.alteredParams = properties;
+ }
+ }
- public static class CompiledDeleteStatement implements ICompiledDmlStatement {
- private VariableExpr var;
- private String dataverseName;
- private String datasetName;
- private Expression condition;
- private Clause dieClause;
- private int varCounter;
- private AqlMetadataProvider metadataProvider;
+ public static class CompiledDeleteStatement implements
+ ICompiledDmlStatement {
+ private VariableExpr var;
+ private String dataverseName;
+ private String datasetName;
+ private Expression condition;
+ private Clause dieClause;
+ private int varCounter;
+ private AqlMetadataProvider metadataProvider;
- public CompiledDeleteStatement(VariableExpr var, String dataverseName, String datasetName,
- Expression condition, Clause dieClause, int varCounter, AqlMetadataProvider metadataProvider) {
- this.var = var;
- this.dataverseName = dataverseName;
- this.datasetName = datasetName;
- this.condition = condition;
- this.dieClause = dieClause;
- this.varCounter = varCounter;
- this.metadataProvider = metadataProvider;
- }
+ public CompiledDeleteStatement(VariableExpr var, String dataverseName,
+ String datasetName, Expression condition, Clause dieClause,
+ int varCounter, AqlMetadataProvider metadataProvider) {
+ this.var = var;
+ this.dataverseName = dataverseName;
+ this.datasetName = datasetName;
+ this.condition = condition;
+ this.dieClause = dieClause;
+ this.varCounter = varCounter;
+ this.metadataProvider = metadataProvider;
+ }
- @Override
- public String getDatasetName() {
- return datasetName;
- }
+ @Override
+ public String getDatasetName() {
+ return datasetName;
+ }
- @Override
- public String getDataverseName() {
- return dataverseName;
- }
+ @Override
+ public String getDataverseName() {
+ return dataverseName;
+ }
- public int getVarCounter() {
- return varCounter;
- }
+ public int getVarCounter() {
+ return varCounter;
+ }
- public Expression getCondition() {
- return condition;
- }
+ public Expression getCondition() {
+ return condition;
+ }
- public Clause getDieClause() {
- return dieClause;
- }
+ public Clause getDieClause() {
+ return dieClause;
+ }
- public Query getQuery() throws AlgebricksException {
+ public Query getQuery() throws AlgebricksException {
- List<Expression> arguments = new ArrayList<Expression>();
- String arg = dataverseName == null ? datasetName : dataverseName + "." + datasetName;
- LiteralExpr argumentLiteral = new LiteralExpr(new StringLiteral(arg));
- arguments.add(argumentLiteral);
+ List<Expression> arguments = new ArrayList<Expression>();
+ String arg = dataverseName == null ? datasetName : dataverseName
+ + "." + datasetName;
+ LiteralExpr argumentLiteral = new LiteralExpr(
+ new StringLiteral(arg));
+ arguments.add(argumentLiteral);
- CallExpr callExpression = new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, "dataset", 1),
- arguments);
- List<Clause> clauseList = new ArrayList<Clause>();
- Clause forClause = new ForClause(var, callExpression);
- clauseList.add(forClause);
- Clause whereClause = null;
- if (condition != null) {
- whereClause = new WhereClause(condition);
- clauseList.add(whereClause);
- }
- if (dieClause != null) {
- clauseList.add(dieClause);
- }
+ CallExpr callExpression = new CallExpr(new FunctionSignature(
+ FunctionConstants.ASTERIX_NS, "dataset", 1), arguments);
+ List<Clause> clauseList = new ArrayList<Clause>();
+ Clause forClause = new ForClause(var, callExpression);
+ clauseList.add(forClause);
+ Clause whereClause = null;
+ if (condition != null) {
+ whereClause = new WhereClause(condition);
+ clauseList.add(whereClause);
+ }
+ if (dieClause != null) {
+ clauseList.add(dieClause);
+ }
- Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
- if (dataset == null) {
- throw new AlgebricksException("Unknown dataset " + datasetName);
- }
- String itemTypeName = dataset.getItemTypeName();
- IAType itemType = metadataProvider.findType(dataset.getDataverseName(), itemTypeName);
- ARecordType recType = (ARecordType) itemType;
- String[] fieldNames = recType.getFieldNames();
- List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
- for (int i = 0; i < fieldNames.length; i++) {
- FieldAccessor fa = new FieldAccessor(var, new Identifier(fieldNames[i]));
- FieldBinding fb = new FieldBinding(new LiteralExpr(new StringLiteral(fieldNames[i])), fa);
- fieldBindings.add(fb);
- }
- RecordConstructor rc = new RecordConstructor(fieldBindings);
+ Dataset dataset = metadataProvider.findDataset(dataverseName,
+ datasetName);
+ if (dataset == null) {
+ throw new AlgebricksException("Unknown dataset " + datasetName);
+ }
+ String itemTypeName = dataset.getItemTypeName();
+ IAType itemType = metadataProvider.findType(
+ dataset.getDataverseName(), itemTypeName);
+ ARecordType recType = (ARecordType) itemType;
+ String[] fieldNames = recType.getFieldNames();
+ List<FieldBinding> fieldBindings = new ArrayList<FieldBinding>();
+ for (int i = 0; i < fieldNames.length; i++) {
+ FieldAccessor fa = new FieldAccessor(var, new Identifier(
+ fieldNames[i]));
+ FieldBinding fb = new FieldBinding(new LiteralExpr(
+ new StringLiteral(fieldNames[i])), fa);
+ fieldBindings.add(fb);
+ }
+ RecordConstructor rc = new RecordConstructor(fieldBindings);
- FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
- Query query = new Query();
- query.setBody(flowgr);
- return query;
- }
+ FLWOGRExpression flowgr = new FLWOGRExpression(clauseList, rc);
+ Query query = new Query();
+ query.setBody(flowgr);
+ return query;
+ }
- @Override
- public Kind getKind() {
- return Kind.DELETE;
- }
+ @Override
+ public Kind getKind() {
+ return Kind.DELETE;
+ }
- }
+ }
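+
+ // Illustrative sketch (hypothetical values, not invoked anywhere): the translation
+ // described in the class comment, e.g. a DROP DATASET statement becomes a
+ // CompiledDatasetDropStatement carrying the resolved dataverse and dataset names.
+ static ICompiledStatement exampleCompiledDatasetDropSketch() {
+ return new CompiledDatasetDropStatement("SampleDataverse", "SampleDataset");
+ }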
}
diff --git a/asterix-app/data/hdfs/asterix_info.txt b/asterix-app/data/hdfs/asterix_info.txt
new file mode 100644
index 0000000..a9a5596
--- /dev/null
+++ b/asterix-app/data/hdfs/asterix_info.txt
@@ -0,0 +1,4 @@
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
diff --git a/asterix-app/data/hdfs/large_text b/asterix-app/data/hdfs/large_text
new file mode 100644
index 0000000..31a394d
--- /dev/null
+++ b/asterix-app/data/hdfs/large_text
@@ -0,0 +1,136 @@
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
+The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information.
+The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters.
+ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual.
+ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information.
diff --git a/asterix-app/data/hdfs/obamatweets.adm b/asterix-app/data/hdfs/obamatweets.adm
new file mode 100644
index 0000000..2567483
--- /dev/null
+++ b/asterix-app/data/hdfs/obamatweets.adm
@@ -0,0 +1,11 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
diff --git a/asterix-app/data/hdfs/textFileS b/asterix-app/data/hdfs/textFileS
new file mode 100644
index 0000000..4bd0604
--- /dev/null
+++ b/asterix-app/data/hdfs/textFileS
Binary files differ
diff --git a/asterix-app/data/twitter/obamatweets.adm b/asterix-app/data/twitter/obamatweets.adm
new file mode 100644
index 0000000..9720960
--- /dev/null
+++ b/asterix-app/data/twitter/obamatweets.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012" }
diff --git a/asterix-app/data/twitter/tw_messages.adm b/asterix-app/data/twitter/tw_messages.adm
new file mode 100644
index 0000000..88f5e24
--- /dev/null
+++ b/asterix-app/data/twitter/tw_messages.adm
@@ -0,0 +1,10 @@
+{"tweetid":"1","tweetid-copy":"1","user":{"screen-name":"RollandEckhardstein#211","lang":"en","friends_count":3657079,"statuses_count":268,"name":"Rolland Eckhardstein","followers_count":3311368},"sender-location":point("42.13,80.43"),"send-time":datetime("2005-12-05T21:06:41"),"send-time-copy":datetime("2005-12-05T21:06:41"),"referred-topics":{{"samsung","plan"}},"message-text":" love samsung the plan is amazing"}
+{"tweetid":"2","tweetid-copy":"2","user":{"screen-name":"RollandEckhardstein#211","lang":"en","friends_count":3657079,"statuses_count":268,"name":"David Eckhardstein","followers_count":3311368},"sender-location":point("28.86,70.44"),"send-time":datetime("2007-08-15T06:44:17"),"send-time-copy":datetime("2007-08-15T06:44:17"),"referred-topics":{{"sprint","voice-clarity"}},"message-text":" like sprint its voice-clarity is mind-blowing"}
+{"tweetid":"3","tweetid-copy":"3","user":{"screen-name":"RollandEckhard#500","lang":"en","friends_count":3657079,"statuses_count":268,"name":"Rolland Hetfield","followers_count":3311368},"sender-location":point("39.84,86.48"),"send-time":datetime("2008-12-24T00:07:04"),"send-time-copy":datetime("2008-12-24T00:07:04"),"referred-topics":{{"verizon","voice-command"}},"message-text":" can't stand verizon its voice-command is terrible:("}
+{"tweetid":"4","tweetid-copy":"4","user":{"screen-name":"RollandEckhardstein#221","lang":"en","friends_count":3657079,"statuses_count":268,"name":"Rolland Eckhardstinz","followers_count":3311368},"sender-location":point("27.67,87.32"),"send-time":datetime("2007-02-05T16:39:13"),"send-time-copy":datetime("2007-02-05T16:39:13"),"referred-topics":{{"t-mobile","customer-service"}},"message-text":" love t-mobile its customer-service is mind-blowing"}
+{"tweetid":"5","tweetid-copy":"5","user":{"screen-name":"RollandEcstein#211","lang":"en","friends_count":3657079,"statuses_count":268,"name":"Rolland Eckhardst","followers_count":3311368},"sender-location":point("27.3,92.77"),"send-time":datetime("2010-09-12T06:15:28"),"send-time-copy":datetime("2010-09-12T06:15:28"),"referred-topics":{{"t-mobile","customization"}},"message-text":" like t-mobile the customization is amazing:)"}
+{"tweetid":"6","tweetid-copy":"6","user":{"screen-name":"Rollkhardstein#211","lang":"en","friends_count":3657079,"statuses_count":268,"name":"Kirk Hammette ","followers_count":3311368},"sender-location":point("45.62,84.78"),"send-time":datetime("2012-01-23T06:23:13"),"send-time-copy":datetime("2012-01-23T06:23:13"),"referred-topics":{{"iphone","network"}},"message-text":" like iphone its network is awesome:)"}
+{"tweetid":"7","tweetid-copy":"7","user":{"screen-name":"andEckhardstein#211","lang":"en","friends_count":3657079,"statuses_count":268,"name":"Rolland khardstein","followers_count":3311368},"sender-location":point("44.12,81.46"),"send-time":datetime("2012-02-17T17:30:26"),"send-time-copy":datetime("2012-02-17T17:30:26"),"referred-topics":{{"t-mobile","network"}},"message-text":" hate t-mobile the network is bad"}
+{"tweetid":"8","tweetid-copy":"8","user":{"screen-name":"Rolltein#211","lang":"en","friends_count":3657079,"statuses_count":268,"name":"Ron Eckhardstein","followers_count":3311368},"sender-location":point("36.86,90.71"),"send-time":datetime("2009-03-12T13:18:04"),"send-time-copy":datetime("2009-03-12T13:18:04"),"referred-topics":{{"at&t","touch-screen"}},"message-text":" dislike at&t its touch-screen is OMG"}
+{"tweetid":"9","tweetid-copy":"9","user":{"screen-name":"Roldstein#211","lang":"en","friends_count":3657079,"statuses_count":268,"name":"Rolland Eckdstein","followers_count":3311368},"sender-location":point("29.07,97.05"),"send-time":datetime("2012-08-15T20:19:46"),"send-time-copy":datetime("2012-08-15T20:19:46"),"referred-topics":{{"verizon","speed"}},"message-text":" hate verizon its speed is bad"}
+{"tweetid":"10","tweetid-copy":"10","user":{"screen-name":"Rolldstein#211","lang":"en","friends_count":3657079,"statuses_count":268,"name":"Rolland Eckhardstful","followers_count":3311368},"sender-location":point("46.94,93.98"),"send-time":datetime("2011-04-07T14:08:46"),"send-time-copy":datetime("2011-04-07T14:08:46"),"referred-topics":{{"t-mobile","signal"}},"message-text":" like t-mobile the signal is good"}
\ No newline at end of file
diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index b47aadc..2f80c30 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -129,17 +129,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-algebricks-compiler</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <artifactId>algebricks-compiler</artifactId>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
index 1aad2ec..1eb37cd 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/api/common/APIFramework.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.api.common;
import java.io.PrintWriter;
@@ -57,6 +71,10 @@
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobSpecification;
+/**
+ * Provides helper methods for compilation of a query into a JobSpec and submission
+ * to Hyracks through the Hyracks client interface.
+ */
public class APIFramework {
private static List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> buildDefaultLogicalRewrites() {
@@ -131,8 +149,9 @@
HTML
}
- public static Pair<Query, Integer> reWriteQuery(List<FunctionDecl> declaredFunctions, AqlMetadataProvider metadataProvider,
- Query q, SessionConfig pc, PrintWriter out, DisplayFormat pdf) throws AsterixException {
+ public static Pair<Query, Integer> reWriteQuery(List<FunctionDecl> declaredFunctions,
+ AqlMetadataProvider metadataProvider, Query q, SessionConfig pc, PrintWriter out, DisplayFormat pdf)
+ throws AsterixException {
if (!pc.isPrintPhysicalOpsOnly() && pc.isPrintExprParam()) {
out.println();
switch (pdf) {
@@ -261,7 +280,7 @@
OptimizationConfUtil.getPhysicalOptimizationConfig().setFrameSize(frameSize);
builder.setPhysicalOptimizationConfig(OptimizationConfUtil.getPhysicalOptimizationConfig());
-
+
ICompiler compiler = compilerFactory.createCompiler(plan, queryMetadataProvider, t.getVarCounter());
if (pc.isOptimize()) {
compiler.optimize();
@@ -310,13 +329,14 @@
builder.setExpressionRuntimeProvider(new LogicalExpressionJobGenToExpressionRuntimeProviderAdapter(
AqlLogicalExpressionJobGen.INSTANCE));
builder.setHashFunctionFactoryProvider(format.getBinaryHashFunctionFactoryProvider());
+ builder.setHashFunctionFamilyProvider(format.getBinaryHashFunctionFamilyProvider());
builder.setNullWriterFactory(format.getNullWriterFactory());
builder.setPrinterProvider(format.getPrinterFactoryProvider());
builder.setSerializerDeserializerProvider(format.getSerdeProvider());
builder.setTypeTraitProvider(format.getTypeTraitProvider());
builder.setNormalizedKeyComputerFactoryProvider(format.getNormalizedKeyComputerFactoryProvider());
- JobSpecification spec = compiler.createJob(AsterixAppContextInfoImpl.INSTANCE);
+ JobSpecification spec = compiler.createJob(AsterixAppContextInfoImpl.getInstance());
// set the job event listener
spec.setJobletEventListenerFactory(new JobEventListenerFactory(queryMetadataProvider.getJobTxnId(),
isWriteTransaction));
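For orientation, the compile-and-submit path that this APIFramework hunk touches reduces to the following condensed sketch, pieced together only from the lines above (compilerFactory, plan, queryMetadataProvider, pc, t, and isWriteTransaction are the variables already in scope in APIFramework; nothing new is introduced):

    // Build a compiler for the logical plan and optimize it if the session asks for it.
    ICompiler compiler = compilerFactory.createCompiler(plan, queryMetadataProvider, t.getVarCounter());
    if (pc.isOptimize()) {
        compiler.optimize();
    }
    // Changed in this hunk: the app context singleton is now obtained via getInstance().
    JobSpecification spec = compiler.createJob(AsterixAppContextInfoImpl.getInstance());
    // Register the job event listener so the transaction id and write flag travel with the job.
    spec.setJobletEventListenerFactory(new JobEventListenerFactory(queryMetadataProvider.getJobTxnId(),
            isWriteTransaction));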
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
index 80293b0..508126f 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/aql/translator/AqlTranslator.java
@@ -106,6 +106,10 @@
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+/*
+ * Provides functionality for executing a batch of AQL statements (queries included)
+ * sequentially.
+ */
public class AqlTranslator extends AbstractAqlTranslator {
private final List<Statement> aqlStatements;
@@ -134,6 +138,12 @@
return functionDecls;
}
+ /**
+ * Compiles and submits for execution a list of AQL statements.
+ * @param hcc A Hyracks client connection that is used to submit a JobSpec to Hyracks.
+ * @return A List<QueryResult> containing a QueryResult instance corresponding to each submitted query.
+ * @throws Exception
+ */
public List<QueryResult> compileAndExecute(IHyracksClientConnection hcc) throws Exception {
List<QueryResult> executionResult = new ArrayList<QueryResult>();
FileSplit outputFile = null;
@@ -709,14 +719,16 @@
IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
BeginFeedStatement bfs = (BeginFeedStatement) stmt;
String dataverseName = bfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
- : activeDefaultDataverse.getDataverseName() : bfs.getDatasetName().getValue();
+ : activeDefaultDataverse.getDataverseName() : bfs.getDataverseName().getValue();
CompiledBeginFeedStatement cbfs = new CompiledBeginFeedStatement(dataverseName,
bfs.getDatasetName().getValue(), bfs.getQuery(), bfs.getVarCounter());
- Dataset dataset;
- dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName, bfs
+ Dataset dataset = MetadataManager.INSTANCE.getDataset(metadataProvider.getMetadataTxnContext(), dataverseName, bfs
.getDatasetName().getValue());
+ if(dataset == null) {
+ throw new AsterixException("Unknown dataset :" + bfs.getDatasetName().getValue());
+ }
IDatasetDetails datasetDetails = dataset.getDatasetDetails();
if (datasetDetails.getDatasetType() != DatasetType.FEED) {
throw new IllegalArgumentException("Dataset " + bfs.getDatasetName().getValue() + " is not a feed dataset");
@@ -733,7 +745,7 @@
IHyracksClientConnection hcc, List<JobSpecification> jobsToExecute) throws Exception {
ControlFeedStatement cfs = (ControlFeedStatement) stmt;
String dataverseName = cfs.getDataverseName() == null ? activeDefaultDataverse == null ? null
- : activeDefaultDataverse.getDataverseName() : cfs.getDatasetName().getValue();
+ : activeDefaultDataverse.getDataverseName() : cfs.getDataverseName().getValue();
CompiledControlFeedStatement clcfs = new CompiledControlFeedStatement(cfs.getOperationType(), dataverseName,
cfs.getDatasetName().getValue(), cfs.getAlterAdapterConfParams());
jobsToExecute.add(FeedOperations.buildControlFeedJobSpec(clcfs, metadataProvider));
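The two AqlTranslator fixes above swap getDatasetName() for getDataverseName() when resolving the target dataverse of a feed statement. Written out as an explicit helper (hypothetical, not part of the patch; Identifier and Dataverse are the assumed argument types), the intended resolution order is:

    // Prefer the dataverse named in the statement; otherwise fall back to the active
    // default dataverse; if neither is available, resolve to null.
    private static String resolveDataverseName(Identifier stmtDataverse, Dataverse activeDefaultDataverse) {
        if (stmtDataverse != null) {
            return stmtDataverse.getValue();
        }
        return activeDefaultDataverse == null ? null : activeDefaultDataverse.getDataverseName();
    }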
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java b/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
index 546973c..f9bd2d5 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/file/FeedOperations.java
@@ -21,10 +21,10 @@
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.feed.comm.AlterFeedMessage;
-import edu.uci.ics.asterix.feed.comm.FeedMessage;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage.MessageType;
+import edu.uci.ics.asterix.external.feed.lifecycle.AlterFeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage.MessageType;
import edu.uci.ics.asterix.metadata.declared.AqlMetadataProvider;
import edu.uci.ics.asterix.metadata.entities.Dataset;
import edu.uci.ics.asterix.metadata.entities.FeedDatasetDetails;
@@ -38,16 +38,29 @@
import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
+/**
+ * Provides helper method(s) for creating a JobSpec for operations on a feed.
+ */
public class FeedOperations {
private static final Logger LOGGER = Logger.getLogger(IndexOperations.class.getName());
+ /**
+ * @param controlFeedStatement
+ * The statement describing the action to be taken on the feed,
+ * e.g., stopping or altering the feed.
+ * @param metadataProvider
+ * An instance of the MetadataProvider
+ * @return An instance of JobSpec for the job that would send an appropriate
+ * control message to the running feed.
+ * @throws AsterixException
+ * @throws AlgebricksException
+ */
public static JobSpecification buildControlFeedJobSpec(CompiledControlFeedStatement controlFeedStatement,
AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
switch (controlFeedStatement.getOperationType()) {
case ALTER:
- case SUSPEND:
- case RESUME:
case END: {
return createSendMessageToFeedJobSpec(controlFeedStatement, metadataProvider);
}
@@ -86,15 +99,9 @@
List<IFeedMessage> feedMessages = new ArrayList<IFeedMessage>();
switch (controlFeedStatement.getOperationType()) {
- case SUSPEND:
- feedMessages.add(new FeedMessage(MessageType.SUSPEND));
- break;
case END:
feedMessages.add(new FeedMessage(MessageType.STOP));
break;
- case RESUME:
- feedMessages.add(new FeedMessage(MessageType.RESUME));
- break;
case ALTER:
feedMessages.add(new AlterFeedMessage(controlFeedStatement.getProperties()));
break;
@@ -102,8 +109,8 @@
try {
Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = metadataProvider.buildFeedMessengerRuntime(
- metadataProvider, spec, (FeedDatasetDetails) dataset.getDatasetDetails(),
- metadataProvider.getDefaultDataverseName(), datasetName, feedMessages);
+ metadataProvider, spec, (FeedDatasetDetails) dataset.getDatasetDetails(), dataverseName,
+ datasetName, feedMessages);
feedMessenger = p.first;
messengerPc = p.second;
} catch (AlgebricksException e) {
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
index ccba498..434f44c 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/CCBootstrapImpl.java
@@ -29,6 +29,7 @@
import edu.uci.ics.asterix.api.aqlj.server.APIClientThreadFactory;
import edu.uci.ics.asterix.api.aqlj.server.ThreadedServer;
import edu.uci.ics.asterix.api.http.servlet.APIServlet;
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
import edu.uci.ics.asterix.common.config.GlobalConfig;
import edu.uci.ics.asterix.metadata.MetadataManager;
import edu.uci.ics.asterix.metadata.api.IAsterixStateProxy;
@@ -37,6 +38,10 @@
import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
import edu.uci.ics.hyracks.api.application.ICCBootstrap;
+/**
+ * The bootstrap class of the application that will manage its
+ * life cycle at the Cluster Controller.
+ */
public class CCBootstrapImpl implements ICCBootstrap {
private static final Logger LOGGER = Logger.getLogger(CCBootstrapImpl.class.getName());
@@ -71,6 +76,9 @@
// Setup and start the API server
setupAPIServer();
apiServer.start();
+
+ //Initialize AsterixAppContext
+ AsterixAppContextInfoImpl.initialize(appCtx);
}
@Override
@@ -79,7 +87,7 @@
LOGGER.info("Stopping Asterix cluster controller");
}
AsterixStateProxy.unregisterRemoteObject();
-
+
webServer.stop();
apiServer.shutdown();
}
@@ -107,11 +115,7 @@
// set the APINodeDataServer ports
int startPort = DEFAULT_API_NODEDATA_SERVER_PORT;
Map<String, Set<String>> nodeNameMap = new HashMap<String, Set<String>>();
- try {
- appCtx.getCCContext().getIPAddressNodeMap(nodeNameMap);
- } catch (Exception e) {
- throw new IOException("Unable to obtain IP address node map", e);
- }
+ getIPAddressNodeMap(nodeNameMap);
for (Map.Entry<String, Set<String>> entry : nodeNameMap.entrySet()) {
Set<String> nodeNames = entry.getValue();
@@ -122,7 +126,15 @@
proxy.setAsterixNodeState(it.next(), ns);
}
}
-
apiServer = new ThreadedServer(DEFAULT_API_SERVER_PORT, new APIClientThreadFactory(appCtx));
}
+
+ private void getIPAddressNodeMap(Map<String, Set<String>> nodeNameMap) throws IOException {
+ nodeNameMap.clear();
+ try {
+ appCtx.getCCContext().getIPAddressNodeMap(nodeNameMap);
+ } catch (Exception e) {
+ throw new IOException("Unable to obtain IP address node map", e);
+ }
+ }
}
\ No newline at end of file
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
index 9b7c356..3505b23 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/metadata/MetadataTest.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.test.metadata;
import java.io.File;
@@ -21,6 +35,9 @@
import edu.uci.ics.asterix.testframework.context.TestCaseContext;
import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
+/**
+ * Executes the Metadata tests.
+ */
@RunWith(Parameterized.class)
public class MetadataTest {
@@ -30,8 +47,7 @@
private static final String PATH_ACTUAL = "mdtest/";
private static final String PATH_BASE = "src/test/resources/metadata/";
private static final String TEST_CONFIG_FILE_NAME = "test.properties";
- private static final String WEB_SERVER_PORT="19002";
- private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
+ private static final String WEB_SERVER_PORT = "19002";
public MetadataTest(TestCaseContext tcCtx) {
this.tcCtx = tcCtx;
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
index d35001e..8eafd02 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/ExecutionTest.java
@@ -21,6 +21,9 @@
import edu.uci.ics.asterix.testframework.context.TestCaseContext;
import edu.uci.ics.asterix.testframework.xml.TestCase.CompilationUnit;
+/**
+ * Runs the runtime test cases under 'src/test/resources/runtimets'.
+ */
@RunWith(Parameterized.class)
public class ExecutionTest {
private static final String PATH_ACTUAL = "rttest/";
@@ -49,7 +52,7 @@
AsterixHyracksIntegrationUtil.init();
// TODO: Uncomment when hadoop version is upgraded and adapters are ported
- //HDFSCluster.getInstance().setup();
+ HDFSCluster.getInstance().setup();
}
@AfterClass
@@ -70,6 +73,7 @@
FileUtils.deleteDirectory(log);
File lsn = new File("last_checkpoint_lsn");
lsn.deleteOnExit();
+ HDFSCluster.getInstance().cleanup();
}
@Parameters
diff --git a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
index c07cff2..20df118 100644
--- a/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
+++ b/asterix-app/src/test/java/edu/uci/ics/asterix/test/runtime/HDFSCluster.java
@@ -1,5 +1,20 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.test.runtime;
+import java.io.File;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
@@ -7,20 +22,29 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
+import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
+import edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter;
+
+/**
+ * Manages a Mini (local VM) HDFS cluster with a configured number of datanodes.
+ *
+ * @author ramangrover29
+ */
@SuppressWarnings("deprecation")
public class HDFSCluster {
private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
- private static final int nameNodePort = 10009;
- private static final String DATA_PATH = "data/tpch0.001";
- private static final String HDFS_PATH = "/tpch";
+ private static final int nameNodePort = 31888;
+ private static final String DATA_PATH = "data/hdfs";
+ private static final String HDFS_PATH = "/asterix";
private static final HDFSCluster INSTANCE = new HDFSCluster();
private MiniDFSCluster dfsCluster;
private int numDataNodes = 2;
private JobConf conf = new JobConf();
+ private FileSystem dfs;
public static HDFSCluster getInstance() {
return INSTANCE;
@@ -30,16 +54,30 @@
}
+ /**
+ * Instantiates the (Mini) DFS Cluster with the configured number of datanodes.
+ * Post instantiation, data is loaded into HDFS.
+ * Called prior to running the Runtime test suite.
+ */
public void setup() throws Exception {
conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
cleanupLocal();
dfsCluster = new MiniDFSCluster(nameNodePort, conf, numDataNodes, true, true, StartupOption.REGULAR, null);
- FileSystem dfs = FileSystem.get(conf);
- Path src = new Path(DATA_PATH);
- Path dest = new Path(HDFS_PATH);
- dfs.copyFromLocalFile(src, dest);
+ dfs = FileSystem.get(conf);
+ loadData();
+ }
+
+ private void loadData() throws IOException {
+ Path destDir = new Path(HDFS_PATH);
+ dfs.mkdirs(destDir);
+ File srcDir = new File(DATA_PATH);
+ File[] listOfFiles = srcDir.listFiles();
+ for (File srcFile : listOfFiles) {
+ Path path = new Path(srcFile.getAbsolutePath());
+ dfs.copyFromLocalFile(path, destDir);
+ }
}
private void cleanupLocal() throws IOException {
@@ -57,7 +95,25 @@
public static void main(String[] args) throws Exception {
HDFSCluster cluster = new HDFSCluster();
cluster.setup();
- cluster.cleanup();
+ JobConf conf = configureJobConf();
+ FileSystem fs = FileSystem.get(conf);
+ InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, 0);
+ for (InputSplit split : inputSplits) {
+ System.out.println("split :" + split);
+ }
+ // cluster.cleanup();
+ }
+
+ private static JobConf configureJobConf() throws Exception {
+ JobConf conf = new JobConf();
+ String hdfsUrl = "hdfs://127.0.0.1:31888";
+ String hdfsPath = "/asterix/extrasmalltweets.txt";
+ conf.set("fs.default.name", hdfsUrl);
+ conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
+ conf.setClassLoader(HDFSAdapter.class.getClassLoader());
+ conf.set("mapred.input.dir", hdfsPath);
+ conf.set("mapred.input.format.class", "org.apache.hadoop.mapred.TextInputFormat");
+ return conf;
}
}
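As the ExecutionTest hunk earlier in this patch suggests, the mini cluster is meant to bracket the runtime test suite. A minimal JUnit sketch of that wiring (method names assumed; only setup() and cleanup() are taken from the code above):

    // Sketch only: mirrors how ExecutionTest brackets the suite with the mini HDFS cluster.
    @BeforeClass
    public static void setUp() throws Exception {
        HDFSCluster.getInstance().setup();    // start the MiniDFSCluster and copy data/hdfs into /asterix
    }

    @AfterClass
    public static void tearDown() throws Exception {
        HDFSCluster.getInstance().cleanup();  // shut the mini cluster down and remove local state
    }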
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_1.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_1.aql
new file mode 100644
index 0000000..f4547d6
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_1.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Dataset
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Dataset;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_2.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_2.aql
new file mode 100644
index 0000000..d62bb49
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_2.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Dataset
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Dataverse;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_3.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_3.aql
new file mode 100644
index 0000000..96dd8cc
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_3.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Nodegroup
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Nodegroup;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_4.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_4.aql
new file mode 100644
index 0000000..7685427
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_4.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Index
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Index;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_5.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_5.aql
new file mode 100644
index 0000000..5e6e468
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_5.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.DatasourceAdapter
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.DatasourceAdapter;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_6.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_6.aql
new file mode 100644
index 0000000..0eb863f
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_6.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset- Metadata.Function
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Function;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_7.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_7.aql
new file mode 100644
index 0000000..6794d04
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_7.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset - Metadata.Datatype
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Datatype;
diff --git a/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_8.aql b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_8.aql
new file mode 100644
index 0000000..d75e27a
--- /dev/null
+++ b/asterix-app/src/test/resources/metadata/queries/exception/issue_239_drop_system_dataset_8.aql
@@ -0,0 +1,8 @@
+/*
+ * Description : Drop a system dataset - Metadata.Node
+ * Expected Res : Failure
+ * Date : 13 Jan 2013
+ * Issue : 239
+ */
+
+drop dataset Metadata.Node;
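
The eight tests above all expect a MetadataException when a system dataset in the Metadata dataverse is dropped. A minimal, hypothetical sketch of the kind of guard that produces such a failure is shown below; the class, method, and message text are illustrative only and are not AsterixDB's actual metadata code:

    // Hypothetical guard; names and message text are illustrative, not the real API.
    public class DropDatasetGuardSketch {
        static class MetadataException extends Exception {
            MetadataException(String msg) { super(msg); }
        }

        /** Rejects drops against the system dataverse before any real work is attempted. */
        static void dropDataset(String dataverseName, String datasetName) throws MetadataException {
            if ("Metadata".equals(dataverseName)) {
                throw new MetadataException(
                        "Cannot drop dataset " + datasetName + " of the system dataverse " + dataverseName);
            }
            // ... normal drop logic would follow here ...
        }

        public static void main(String[] args) {
            try {
                dropDataset("Metadata", "Dataset");
            } catch (MetadataException e) {
                System.out.println("Expected failure: " + e.getMessage());
            }
        }
    }
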
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
index b351cfb..8a56248 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
@@ -1,56 +1,57 @@
-{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "string" }, { "FieldName": "Status", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
-{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Nov 05 10:33:40 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "DataTypeName", "FieldType": "string" }, { "FieldName": "DatasetType", "FieldType": "string" }, { "FieldName": "InternalDetails", "FieldType": "Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "ExternalDetails", "FieldType": "Field_ExternalDetails_in_DatasetRecordType" }, { "FieldName": "FeedDetails", "FieldType": "Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatasourceAdapterRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Classname", "FieldType": "string" }, { "FieldName": "Type", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatatypeName", "FieldType": "string" }, { "FieldName": "Derived", "FieldType": "Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "DataverseRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DataFormat", "FieldType": "string" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FieldName", "FieldType": "string" }, { "FieldName": "FieldType", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_NodeNames_in_NodeGroupRecordType", "Derived": { "Tag": "UNORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": "string", "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Params_in_FunctionRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType_ItemType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Value", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_SearchKey_in_IndexRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "UNION", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": [ "null", "string" ], "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "FunctionRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "Name", "FieldType": "string" }, { "FieldName": "Arity", "FieldType": "string" }, { "FieldName": "Params", "FieldType": "Field_Params_in_FunctionRecordType" }, { "FieldName": "ReturnType", "FieldType": "string" }, { "FieldName": "Definition", "FieldType": "string" }, { "FieldName": "Language", "FieldType": "string" }, { "FieldName": "Kind", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "IndexRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DataverseName", "FieldType": "string" }, { "FieldName": "DatasetName", "FieldType": "string" }, { "FieldName": "IndexName", "FieldType": "string" }, { "FieldName": "IndexStructure", "FieldType": "string" }, { "FieldName": "SearchKey", "FieldType": "Field_SearchKey_in_IndexRecordType" }, { "FieldName": "IsPrimary", "FieldType": "boolean" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeGroupRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "NodeNames", "FieldType": "Field_NodeNames_in_NodeGroupRecordType" }, { "FieldName": "Timestamp", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "NodeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": false, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "NodeName", "FieldType": "string" }, { "FieldName": "NumberOfCores", "FieldType": "int32" }, { "FieldName": "WorkingMemorySize", "FieldType": "int32" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "Tag", "FieldType": "string" }, { "FieldName": "IsAnonymous", "FieldType": "boolean" }, { "FieldName": "EnumValues", "FieldType": "Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Record", "FieldType": "Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "Union", "FieldType": "Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "UnorderedList", "FieldType": "Field_UnorderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" }, { "FieldName": "OrderedList", "FieldType": "Field_OrderedList_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_EnumValues_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_ExternalDetails_in_DatasetRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" }, { "FieldName": "DatasourceAdapter", "FieldType": "string" }, { "FieldName": "Properties", "FieldType": "Field_Properties_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Function", "FieldType": "Field_Function_in_Type_#1_UnionType_Field_FeedDetails_in_DatasetRecordType" }, { "FieldName": "Status", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "FileStructure", "FieldType": "string" }, { "FieldName": "PartitioningStrategy", "FieldType": "string" }, { "FieldName": "PartitioningKey", "FieldType": "Field_PartitioningKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "PrimaryKey", "FieldType": "Field_PrimaryKey_in_Type_#1_UnionType_Field_InternalDetails_in_DatasetRecordType" }, { "FieldName": "GroupName", "FieldType": "string" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "RECORD", "IsAnonymous": true, "EnumValues": null, "Record": { "IsOpen": true, "Fields": [ { "FieldName": "IsOpen", "FieldType": "boolean" }, { "FieldName": "Fields", "FieldType": "Field_Fields_in_Type_#1_UnionType_Field_Record_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType" } ] }, "Union": null, "UnorderedList": null, "OrderedList": null }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "Type_#1_UnionType_Field_Union_in_Type_#1_UnionType_Field_Derived_in_DatatypeRecordType", "Derived": { "Tag": "ORDEREDLIST", "IsAnonymous": true, "EnumValues": null, "Record": null, "Union": null, "UnorderedList": null, "OrderedList": "string" }, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "boolean", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "circle", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "date", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "datetime", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "double", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "duration", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "float", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "int16", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "int32", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "int64", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "int8", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "line", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "null", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "point", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "point3d", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "polygon", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "rectangle", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
+{ "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Dec 24 14:01:42 PST 2012" }
diff --git a/asterix-app/src/test/resources/metadata/testsuite.xml b/asterix-app/src/test/resources/metadata/testsuite.xml
index 78dc3b5..b1b303e 100644
--- a/asterix-app/src/test/resources/metadata/testsuite.xml
+++ b/asterix-app/src/test/resources/metadata/testsuite.xml
@@ -108,43 +108,49 @@
</test-group>
<test-group name="exception">
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_dataset">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_1">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_dataverse">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_2">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_index">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_3">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_nodegroup">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_4">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_type1">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_5">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_type2">
- <expected-error>MetadataException</expected-error>
+ <compilation-unit name="issue_239_drop_system_dataset_6">
<output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
</compilation-unit>
</test-case>
<test-case FilePath="exception">
- <compilation-unit name="exception_drop_type3">
+ <compilation-unit name="issue_239_drop_system_dataset_7">
+ <output-file compare="Text">none.adm</output-file>
+ <expected-error>MetadataException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="exception">
+ <compilation-unit name="issue_239_drop_system_dataset_8">
<output-file compare="Text">none.adm</output-file>
<expected-error>MetadataException</expected-error>
</compilation-unit>
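
For illustration, a small stand-alone sketch of how a harness could read the reshaped test-case entries above using only JDK DOM parsing. The element names and file path follow the snippet in this diff, but the harness itself is hypothetical:

    import java.io.File;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.w3c.dom.Document;
    import org.w3c.dom.Element;
    import org.w3c.dom.NodeList;

    public class TestSuiteReaderSketch {
        public static void main(String[] args) throws Exception {
            // Path taken from the diff above; adjust to the local checkout.
            File suite = new File("asterix-app/src/test/resources/metadata/testsuite.xml");
            Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(suite);
            NodeList units = doc.getElementsByTagName("compilation-unit");
            for (int i = 0; i < units.getLength(); i++) {
                Element unit = (Element) units.item(i);
                String name = unit.getAttribute("name");
                // Each unit lists its expected output file and, optionally, an expected error.
                NodeList errors = unit.getElementsByTagName("expected-error");
                String expectedError = errors.getLength() > 0 ? errors.item(0).getTextContent() : "none";
                System.out.println(name + " -> expected error: " + expectedError);
            }
        }
    }
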
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-multipred.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-multipred.plan
index 8cadace..0f98d59 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-multipred.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join-multipred.plan
@@ -2,15 +2,17 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- BTREE_SEARCH |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$15(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$15(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan
index b9b830e..a83e0eb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_01.plan
@@ -2,14 +2,15 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- BTREE_SEARCH |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$11(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$11] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$11(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$11] |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan
index d11dc6a..d6d4497 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_02.plan
@@ -2,14 +2,15 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- BTREE_SEARCH |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$10(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$10] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$10] |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan
index a2e27c9..c5c0f4f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/primary-equi-join_03.plan
@@ -2,9 +2,10 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- BTREE_SEARCH |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multiindex.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multiindex.plan
index 76a5aa8..947404f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multiindex.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multiindex.plan
@@ -2,18 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$29(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$29(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multipred.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multipred.plan
index 0edd774..e767365 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multipred.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join-multipred.plan
@@ -2,18 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$24(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$24(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_01.plan
index 14cd128..7e0c315 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_01.plan
@@ -2,17 +2,19 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$13(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$13(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_02.plan
index 14cd128..7e0c315 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_02.plan
@@ -2,17 +2,19 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$13(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$13(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_03.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_03.plan
index 14cd128..7e0c315 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index-join/secondary-equi-join_03.plan
@@ -2,17 +2,19 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$13(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$13(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-01.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-01.plan
index 4189498..46990a8 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-01.plan
@@ -2,7 +2,8 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-02.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-02.plan
index 4189498..46990a8 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-02.plan
@@ -2,7 +2,8 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-03.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-03.plan
index 4189498..46990a8 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-03.plan
@@ -2,7 +2,8 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-04.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-04.plan
index 4189498..46990a8 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-04.plan
@@ -2,7 +2,8 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-05.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-05.plan
index 4189498..46990a8 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-05.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-05.plan
@@ -2,7 +2,8 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-06.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-06.plan
index 4189498..46990a8 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-06.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-06.plan
@@ -2,7 +2,8 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-07.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-07.plan
index 4189498..46990a8 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-07.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-primary-07.plan
@@ -2,7 +2,8 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-31.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-31.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-31.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-31.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-32.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-32.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-32.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-32.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-33.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-33.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-33.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-33.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-34.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-34.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-34.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-34.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-35.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-35.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-35.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-35.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-36.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-36.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-36.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-36.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-37.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-37.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-37.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-37.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-38.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-38.plan
index f689565..7c1c1a3 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-38.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-38.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$16(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$16(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-39.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-39.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-39.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-39.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-40.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-40.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-40.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-40.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-41.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-41.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-41.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-41.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-42.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-42.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-42.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-42.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-43.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-43.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-43.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-43.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-44.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-44.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-44.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-44.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-45.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-45.plan
index 22b26c4..35c7ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-45.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-45.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$20(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$20(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-46.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-46.plan
index 22b26c4..35c7ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-46.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-46.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$20(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$20(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-47.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-47.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-47.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-47.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-48.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-48.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-48.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-48.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-49.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-49.plan
index 22b26c4..35c7ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-49.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-49.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$20(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$20(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-50.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-50.plan
index 22b26c4..35c7ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-50.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-50.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$20(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$20(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-51.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-51.plan
index 22b26c4..35c7ebb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-51.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-51.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$20(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$20(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-52.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-52.plan
index bc7e26b..e6e6ddf 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-52.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-52.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$8(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$8(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-53.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-53.plan
index bc7e26b..e6e6ddf 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-53.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-53.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$8(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$8(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-54.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-54.plan
index bc7e26b..e6e6ddf 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-54.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-54.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$8(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$8(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-55.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-55.plan
index bc7e26b..e6e6ddf 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-55.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-55.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$8(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$8(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-56.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-56.plan
index 4f9f4ba..16e6d6a 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-56.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-56.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$12(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$12(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-57.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-57.plan
index 4f9f4ba..16e6d6a 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-57.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-57.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$12(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$12(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-58.plan b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-58.plan
index 8c6eaed..edeed53 100644
--- a/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-58.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/btree-index/btree-secondary-58.plan
@@ -3,8 +3,8 @@
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$9(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$9(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-complex.plan b/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-complex.plan
index 553a531..26ea56f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-complex.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-complex.plan
@@ -2,9 +2,11 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-simple.plan b/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-simple.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-simple.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/consolidate-selects-simple.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/const-folding.plan b/asterix-app/src/test/resources/optimizerts/results/const-folding.plan
index 70e2daa..413cf0c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/const-folding.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/const-folding.plan
@@ -1,4 +1,3 @@
-- SINK_WRITE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- EMPTY_TUPLE_SOURCE |UNPARTITIONED|
+ -- ASSIGN |UNPARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |UNPARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/count-tweets.plan b/asterix-app/src/test/resources/optimizerts/results/count-tweets.plan
index 927acdd..c769bda 100644
--- a/asterix-app/src/test/resources/optimizerts/results/count-tweets.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/count-tweets.plan
@@ -9,18 +9,19 @@
-- NESTED_TUPLE_SOURCE |LOCAL|
}
-- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$14(ASC)] HASH:[$$14] |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$4] |LOCAL|
+ -- PRE_CLUSTERED_GROUP_BY[$$4] |PARTITIONED|
{
-- AGGREGATE |LOCAL|
-- NESTED_TUPLE_SOURCE |LOCAL|
}
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$4(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$4(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- UNNEST |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/cust_group_no_agg.plan b/asterix-app/src/test/resources/optimizerts/results/cust_group_no_agg.plan
index 6559478..7670428 100644
--- a/asterix-app/src/test/resources/optimizerts/results/cust_group_no_agg.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/cust_group_no_agg.plan
@@ -1,18 +1,17 @@
-- SINK_WRITE |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$1(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$6] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$6] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$6(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$6] |PARTITIONED|
+ -- STABLE_SORT [$$6(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$6] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/denorm-cust-order.plan b/asterix-app/src/test/resources/optimizerts/results/denorm-cust-order.plan
index c48c00c..b471681 100644
--- a/asterix-app/src/test/resources/optimizerts/results/denorm-cust-order.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/denorm-cust-order.plan
@@ -8,8 +8,8 @@
-- AGGREGATE |LOCAL|
-- NESTED_TUPLE_SOURCE |LOCAL|
}
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$16(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$16(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
@@ -19,8 +19,8 @@
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
-- HASH_PARTITION_EXCHANGE [$$19] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/fj-dblp-csx.plan b/asterix-app/src/test/resources/optimizerts/results/fj-dblp-csx.plan
index edfabb8..9fbd8ff 100644
--- a/asterix-app/src/test/resources/optimizerts/results/fj-dblp-csx.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/fj-dblp-csx.plan
@@ -7,8 +7,8 @@
-- AGGREGATE |LOCAL|
-- NESTED_TUPLE_SOURCE |LOCAL|
}
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$42(ASC), $$44(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$42(ASC), $$44(ASC)] |PARTITIONED|
-- HASH_PARTITION_EXCHANGE [$$42, $$44] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
@@ -23,8 +23,8 @@
-- STREAM_SELECT |LOCAL|
-- NESTED_TUPLE_SOURCE |LOCAL|
}
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$46(ASC), $$5(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$46(ASC), $$5(ASC)] |PARTITIONED|
-- HASH_PARTITION_EXCHANGE [$$46] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
@@ -32,11 +32,12 @@
-- HASH_PARTITION_EXCHANGE [$$3] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- UNNEST |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |UNPARTITIONED|
-- ASSIGN |UNPARTITIONED|
@@ -47,10 +48,11 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
-- HASH_PARTITION_EXCHANGE [$$17] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- UNNEST |PARTITIONED|
@@ -61,8 +63,8 @@
-- STREAM_SELECT |LOCAL|
-- NESTED_TUPLE_SOURCE |LOCAL|
}
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$47(ASC), $$14(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$47(ASC), $$14(ASC)] |PARTITIONED|
-- HASH_PARTITION_EXCHANGE [$$47] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
@@ -70,11 +72,12 @@
-- HASH_PARTITION_EXCHANGE [$$12] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- UNNEST |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- SPLIT |PARTITIONED|
-- HASH_PARTITION_EXCHANGE [$$35] |PARTITIONED|
@@ -82,7 +85,8 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan b/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
index c930bc7..d0479cc 100644
--- a/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/fj-phase1.plan
@@ -2,51 +2,55 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$23] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC), $$6(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$23] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$1][$$6] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$1] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- UNNEST |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$6] |PARTITIONED|
- -- RUNNING_AGGREGATE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$23] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- STREAM_SELECT |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC), $$6(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$23] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$1][$$6] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$1] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
- -- SORT_MERGE_EXCHANGE [$$24(DESC) ] |PARTITIONED|
- -- STABLE_SORT [$$24(DESC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$30] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$30(ASC)] HASH:[$$30] |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$5] |LOCAL|
- {
- -- AGGREGATE |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$5(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- UNNEST |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- UNNEST |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$6] |PARTITIONED|
+ -- RUNNING_AGGREGATE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- SORT_MERGE_EXCHANGE [$$24(DESC) ] |PARTITIONED|
+ -- STABLE_SORT [$$24(DESC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$30] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$30(ASC)] HASH:[$$30] |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$5] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$5(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- UNNEST |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/fj-phase2-with-hints.plan b/asterix-app/src/test/resources/optimizerts/results/fj-phase2-with-hints.plan
index 273b25d..a1dc0d4 100644
--- a/asterix-app/src/test/resources/optimizerts/results/fj-phase2-with-hints.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/fj-phase2-with-hints.plan
@@ -1,52 +1,56 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- SORT_MERGE_EXCHANGE [$$31(ASC) ] |PARTITIONED|
- -- STABLE_SORT [$$31(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$27] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- IN_MEMORY_STABLE_SORT [$$4(ASC)] |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- IN_MEMORY_HASH_JOIN [$$2][$$7] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- UNNEST |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- RUNNING_AGGREGATE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- SORT_MERGE_EXCHANGE [$$31(ASC) ] |PARTITIONED|
+ -- STABLE_SORT [$$31(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$27] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- IN_MEMORY_STABLE_SORT [$$4(ASC)] |LOCAL|
+ -- STREAM_SELECT |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- IN_MEMORY_HASH_JOIN [$$2][$$7] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
- -- SORT_MERGE_EXCHANGE [$$28(ASC), $$7(ASC) ] |PARTITIONED|
- -- STABLE_SORT [$$28(ASC), $$7(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EXTERNAL_GROUP_BY[$$36] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- HASH_PARTITION_EXCHANGE [$$36] |PARTITIONED|
- -- EXTERNAL_GROUP_BY[$$6] |LOCAL|
- {
- -- AGGREGATE |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- UNNEST |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- UNNEST |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- RUNNING_AGGREGATE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- SORT_MERGE_EXCHANGE [$$28(ASC), $$7(ASC) ] |PARTITIONED|
+ -- STABLE_SORT [$$28(ASC), $$7(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EXTERNAL_GROUP_BY[$$36] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- HASH_PARTITION_EXCHANGE [$$36] |PARTITIONED|
+ -- EXTERNAL_GROUP_BY[$$6] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- UNNEST |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inline-funs.plan b/asterix-app/src/test/resources/optimizerts/results/inline-funs.plan
index 70e2daa..413cf0c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inline-funs.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inline-funs.plan
@@ -1,4 +1,3 @@
-- SINK_WRITE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- EMPTY_TUPLE_SOURCE |UNPARTITIONED|
+ -- ASSIGN |UNPARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |UNPARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan b/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan
index 47986a9..768954d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inlined_q18_large_volume_customer.plan
@@ -3,9 +3,9 @@
-- ASSIGN |UNPARTITIONED|
-- STREAM_LIMIT |UNPARTITIONED|
-- SORT_MERGE_EXCHANGE [$$12(DESC), $$11(ASC) ] |PARTITIONED|
- -- STREAM_LIMIT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$12(DESC), $$11(ASC)] |LOCAL|
+ -- STREAM_LIMIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$12(DESC), $$11(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- PRE_CLUSTERED_GROUP_BY[$$72, $$73] |PARTITIONED|
{
@@ -13,13 +13,13 @@
-- NESTED_TUPLE_SOURCE |LOCAL|
}
-- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$72(ASC), $$73(ASC)] HASH:[$$72, $$73] |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$56, $$57] |LOCAL|
+ -- PRE_CLUSTERED_GROUP_BY[$$56, $$57] |PARTITIONED|
{
-- AGGREGATE |LOCAL|
-- NESTED_TUPLE_SOURCE |LOCAL|
}
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$56(ASC), $$57(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$56(ASC), $$57(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
@@ -56,7 +56,7 @@
-- NESTED_TUPLE_SOURCE |LOCAL|
}
-- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$69(ASC)] HASH:[$$69] |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$58] |LOCAL|
+ -- PRE_CLUSTERED_GROUP_BY[$$58] |PARTITIONED|
{
-- AGGREGATE |LOCAL|
-- NESTED_TUPLE_SOURCE |LOCAL|
@@ -66,10 +66,11 @@
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
-- HASH_PARTITION_EXCHANGE [$$60] |PARTITIONED|
-- STREAM_PROJECT |UNPARTITIONED|
-- ASSIGN |UNPARTITIONED|
@@ -78,7 +79,8 @@
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/introhashpartitionmerge.plan b/asterix-app/src/test/resources/optimizerts/results/introhashpartitionmerge.plan
index 097c0df..7c47272 100644
--- a/asterix-app/src/test/resources/optimizerts/results/introhashpartitionmerge.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/introhashpartitionmerge.plan
@@ -1,6 +1,6 @@
-- SINK_WRITE |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$13(ASC) ] |PARTITIONED|
- -- STABLE_SORT [$$13(ASC)] |LOCAL|
+ -- STABLE_SORT [$$13(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
@@ -8,10 +8,11 @@
-- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
-- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$13(ASC)] HASH:[$$16] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-contains-panic.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-contains-panic.plan
index ba99e55..f47d782 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-contains-panic.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-contains-panic.plan
@@ -1,9 +1,8 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$5(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-contains.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-contains.plan
index a03829e..a0e113c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-contains.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-contains.plan
@@ -1,12 +1,12 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$5(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$9(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$9(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check-panic.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check-panic.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check-panic.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check-panic.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check.plan
index a492d19..16df47c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-check.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$8(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$8(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-panic.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-panic.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-panic.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance-panic.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance.plan
index a492d19..16df47c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-edit-distance.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$8(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$8(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-edit-distance.plan
index 215d368..9b8c02b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-edit-distance.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$7(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$7(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-jaccard.plan
index 88dfaa2..eb44a07 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-fuzzyeq-jaccard.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$9(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$9(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard-check.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard-check.plan
index 427cded..efc0c6f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard-check.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$10(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard.plan
index 427cded..efc0c6f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ngram-jaccard.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$10(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-check-panic.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-check-panic.plan
index a06af62..658c7a7 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-check-panic.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-check-panic.plan
@@ -1,9 +1,8 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$7(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-check.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-check.plan
index 93cdf12..4616c55 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-check.plan
@@ -1,12 +1,12 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$7(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$11(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$11(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-panic.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-panic.plan
index a06af62..658c7a7 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-panic.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance-panic.plan
@@ -1,9 +1,8 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$7(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance.plan
index 93cdf12..4616c55 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-edit-distance.plan
@@ -1,12 +1,12 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$7(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$11(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$11(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-edit-distance.plan
index 26831cd..04c2b2b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-edit-distance.plan
@@ -1,12 +1,12 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$6(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$10(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-jaccard.plan
index a492d19..16df47c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-fuzzyeq-jaccard.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$8(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$8(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard-check.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard-check.plan
index 88dfaa2..eb44a07 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard-check.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$9(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$9(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard.plan
index 88dfaa2..eb44a07 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/olist-jaccard.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$9(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$9(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-fuzzyeq-jaccard.plan
index a492d19..16df47c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-fuzzyeq-jaccard.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$8(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$8(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard-check.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard-check.plan
index 88dfaa2..eb44a07 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard-check.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$9(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$9(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard.plan
index 88dfaa2..eb44a07 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/ulist-jaccard.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$9(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$9(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-contains.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-contains.plan
index ba99e55..f47d782 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-contains.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-contains.plan
@@ -1,9 +1,8 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$5(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-fuzzyeq-jaccard.plan
index 88dfaa2..eb44a07 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-fuzzyeq-jaccard.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$9(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$9(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard-check.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard-check.plan
index 427cded..efc0c6f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard-check.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$10(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard.plan
index 427cded..efc0c6f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-basic/word-jaccard.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$10(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.plan
index 148a0a8..1d39102 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_01.plan
@@ -3,14 +3,15 @@
-- STREAM_PROJECT |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$14(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$14(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.plan
index 148a0a8..1d39102 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic-nopanic_02.plan
@@ -3,14 +3,15 @@
-- STREAM_PROJECT |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$14(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$14(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let-panic.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let.plan
index 88dfaa2..eb44a07 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-edit-distance-check-let.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$9(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$9(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-let.plan
index 395c9d9..98bb997 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-let.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$11(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$11(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-multi-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-multi-let.plan
index 20cfea3..c6292b5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-multi-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ngram-jaccard-check-multi-let.plan
@@ -3,15 +3,17 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$16(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$16(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-edit-distance-check-let-panic.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-edit-distance-check-let-panic.plan
index 065eb8d..b9c935e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-edit-distance-check-let-panic.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-edit-distance-check-let-panic.plan
@@ -1,9 +1,8 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$8(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-edit-distance-check-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-edit-distance-check-let.plan
index b2ab9f1..8ca73c8 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-edit-distance-check-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-edit-distance-check-let.plan
@@ -1,12 +1,12 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$8(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$12(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$12(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-jaccard-check-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-jaccard-check-let.plan
index 427cded..efc0c6f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-jaccard-check-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/olist-jaccard-check-let.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$10(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ulist-jaccard-check-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ulist-jaccard-check-let.plan
index 427cded..efc0c6f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ulist-jaccard-check-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/ulist-jaccard-check-let.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$10(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-let.plan
index 395c9d9..98bb997 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-let.plan
@@ -1,11 +1,11 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$11(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$11(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-multi-let.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-multi-let.plan
index 20cfea3..c6292b5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-multi-let.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-complex/word-jaccard-check-multi-let.plan
@@ -3,15 +3,17 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$16(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$16(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance-inline.plan
index 32b0ea8..d076896 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance-inline.plan
@@ -1,50 +1,55 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$31(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$31(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance.plan
index 42c6136..f8c761c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-edit-distance.plan
@@ -1,47 +1,52 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$28(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$28(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.plan
index 15b5575..d85439b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-edit-distance.plan
@@ -1,47 +1,52 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$27(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$27(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.plan
index 0401785..f9cd587 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-fuzzyeq-jaccard.plan
@@ -2,19 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard-inline.plan
index 40967a9..68b8344 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard-inline.plan
@@ -2,20 +2,22 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$30(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$30(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard.plan
index 30c69d0..c81fb49 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ngram-jaccard.plan
@@ -2,19 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$27(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$27(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance-inline.plan
index 32b0ea8..d076896 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance-inline.plan
@@ -1,50 +1,55 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$31(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$31(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance.plan
index 42c6136..f8c761c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-edit-distance.plan
@@ -1,47 +1,52 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$28(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$28(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.plan
index 15b5575..d85439b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-edit-distance.plan
@@ -1,47 +1,52 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$27(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$27(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.plan
index 9d57e49..a5ccaf0 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-fuzzyeq-jaccard.plan
@@ -2,19 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$24(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$24(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard-inline.plan
index e24f034..d56abde 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard-inline.plan
@@ -2,20 +2,22 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$28(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$28(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard.plan
index 5fb0b58..7e81b5c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/olist-jaccard.plan
@@ -2,19 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.plan
index 9d57e49..a5ccaf0 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-fuzzyeq-jaccard.plan
@@ -2,19 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$24(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$24(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard-inline.plan
index e24f034..d56abde 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard-inline.plan
@@ -2,20 +2,22 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$28(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$28(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard.plan
index 5fb0b58..7e81b5c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/ulist-jaccard.plan
@@ -2,19 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.plan
index 0401785..f9cd587 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-fuzzyeq-jaccard.plan
@@ -2,19 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard-inline.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard-inline.plan
index 40967a9..68b8344 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard-inline.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard-inline.plan
@@ -2,20 +2,22 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$30(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$30(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard.plan
index 30c69d0..c81fb49 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join-noeqjoin/word-jaccard.plan
@@ -2,19 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$27(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$27(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_01.plan
index 6439994..dc0290e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_01.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_02.plan
index 6731e33..8778a91 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_02.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_03.plan
index 6439994..dc0290e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_03.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_04.plan
index 9f46c2c..ab51938 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance-check_04.plan
@@ -2,53 +2,58 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$26][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$31(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$26][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$31(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_01.plan
index 6439994..dc0290e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_01.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_02.plan
index 6731e33..8778a91 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_02.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_03.plan
index 6439994..dc0290e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_03.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_04.plan
index ecea305..124683b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-edit-distance_04.plan
@@ -2,53 +2,58 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$26][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$31(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$26][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$31(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_01.plan
index 1215aef..a303675 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_01.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$12] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$12] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$12] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$12] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_02.plan
index b61f25f..c1684186 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_02.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_03.plan
index b61f25f..c1684186 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-edit-distance_03.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_01.plan
index 43a994d..56bdbc0 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$22][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$22][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$24(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$24(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_02.plan
index 8a4d94a..92aeb25 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$22][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$22][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$24(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$24(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_03.plan
index 43a994d..56bdbc0 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-fuzzyeq-jaccard_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$22][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$22][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$24(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$24(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_01.plan
index ecd5df2..118c59d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_02.plan
index 14c6700..5b17b83 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_03.plan
index ecd5df2..118c59d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_04.plan
index d7c0f9c..7008fb5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard-check_04.plan
@@ -2,27 +2,29 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$28][$$18] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$18] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$30(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$28][$$18] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$18] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$30(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_01.plan
index ecd5df2..118c59d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_02.plan
index 14c6700..5b17b83 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_03.plan
index ecd5df2..118c59d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_04.plan
index 40c3151..e0fb775 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ngram-jaccard_04.plan
@@ -2,27 +2,29 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$28][$$17] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$17] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$30(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$28][$$17] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$17] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$30(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_01.plan
index 6439994..dc0290e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_01.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_02.plan
index 6731e33..8778a91 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_02.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_03.plan
index 6439994..dc0290e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_03.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_04.plan
index 9f46c2c..ab51938 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance-check_04.plan
@@ -2,53 +2,58 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$26][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$31(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$26][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$31(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_01.plan
index 6439994..dc0290e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_01.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_02.plan
index 6731e33..8778a91 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_02.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_03.plan
index 6439994..dc0290e 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_03.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$26(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_04.plan
index ecea305..124683b 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-edit-distance_04.plan
@@ -2,53 +2,58 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$26][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$31(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$26][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$31(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_01.plan
index b61f25f..c1684186 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_01.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_02.plan
index 1215aef..a303675 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_02.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$12] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$12] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$12] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$12] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_03.plan
index b61f25f..c1684186 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-edit-distance_03.plan
@@ -2,50 +2,54 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- UNION_ALL |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- UNION_ALL |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- SPLIT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- NESTED_LOOP |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- SPLIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- NESTED_LOOP |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- SPLIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_01.plan
index 8e4525f..fe21162 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$22(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$22(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_02.plan
index 6af20b1..041c44c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$12] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$12] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$12] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$22(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$12] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$22(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_03.plan
index 8e4525f..fe21162 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-fuzzyeq-jaccard_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$22(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$22(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_01.plan
index 86ad849..7b94042 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_02.plan
index 7f94245..be2180f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_03.plan
index 86ad849..7b94042 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_04.plan
index 9c3253d..9ba18b3 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard-check_04.plan
@@ -2,27 +2,29 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$26][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$28(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$26][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$28(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_01.plan
index 86ad849..7b94042 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_02.plan
index 7f94245..be2180f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_03.plan
index 86ad849..7b94042 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_04.plan
index c2065ae..8fa9f49 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/olist-jaccard_04.plan
@@ -2,27 +2,29 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$26][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$28(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$26][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$28(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_01.plan
index 8e4525f..fe21162 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$22(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$22(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_02.plan
index 6af20b1..041c44c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$12] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$12] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$12] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$22(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$12] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$22(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_03.plan
index 8e4525f..fe21162 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-fuzzyeq-jaccard_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$20][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$22(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$22(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_01.plan
index 86ad849..7b94042 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_02.plan
index 7f94245..be2180f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_03.plan
index 86ad849..7b94042 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_04.plan
index 9c3253d..9ba18b3 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard-check_04.plan
@@ -2,27 +2,29 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$26][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$28(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$26][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$28(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_01.plan
index 86ad849..7b94042 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_02.plan
index 7f94245..be2180f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$13] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$13] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_03.plan
index 86ad849..7b94042 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$21][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_04.plan
index c2065ae..8fa9f49 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/ulist-jaccard_04.plan
@@ -2,27 +2,29 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$26][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$28(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$26][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$28(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_01.plan
index 43a994d..56bdbc0 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$22][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$22][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$24(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$24(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_02.plan
index 8a4d94a..92aeb25 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$22][$$14] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$22][$$14] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$24(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$24(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_03.plan
index 43a994d..56bdbc0 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-fuzzyeq-jaccard_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$22][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$22][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$24(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$24(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_01.plan
index ecd5df2..118c59d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_02.plan
index 14c6700..9368e16 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_03.plan
index ecd5df2..118c59d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_04.plan
index d7c0f9c..7008fb5 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard-check_04.plan
@@ -2,27 +2,29 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$28][$$18] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$18] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$30(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$28][$$18] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$18] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$30(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_01.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_01.plan
index ecd5df2..118c59d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_01.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_02.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_02.plan
index 14c6700..5b17b83 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_02.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$15] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$15] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_03.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_03.plan
index ecd5df2..118c59d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_03.plan
@@ -2,26 +2,27 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |LOCAL|
- -- STREAM_SELECT |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$16] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC)] |LOCAL|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- INVERTED_INDEX_SEARCH |PARTITIONED|
-- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_04.plan b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_04.plan
index 40c3151..e0fb775 100644
--- a/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/inverted-index-join/word-jaccard_04.plan
@@ -2,27 +2,29 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$28][$$17] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$17] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ASSIGN |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$30(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INVERTED_INDEX_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$28][$$17] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$17] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$30(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INVERTED_INDEX_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/join-super-key_01.plan b/asterix-app/src/test/resources/optimizerts/results/join-super-key_01.plan
index f5f0063..0d301cf 100644
--- a/asterix-app/src/test/resources/optimizerts/results/join-super-key_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/join-super-key_01.plan
@@ -2,19 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$17, $$22, $$24][$$19, $$23, $$20] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$17, $$22, $$24] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$17, $$22, $$24][$$19, $$23, $$20] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$17, $$22, $$24] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$19, $$23, $$20] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$19, $$23, $$20] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/join-super-key_02.plan b/asterix-app/src/test/resources/optimizerts/results/join-super-key_02.plan
index 6476e6c..f4784ef 100644
--- a/asterix-app/src/test/resources/optimizerts/results/join-super-key_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/join-super-key_02.plan
@@ -2,19 +2,21 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$17, $$23, $$18][$$19, $$22, $$24] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$17, $$23, $$18][$$19, $$22, $$24] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$19, $$24] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$19, $$24] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/loj-super-key_01.plan b/asterix-app/src/test/resources/optimizerts/results/loj-super-key_01.plan
index 87aac7f..1a9fbdb 100644
--- a/asterix-app/src/test/resources/optimizerts/results/loj-super-key_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/loj-super-key_01.plan
@@ -2,28 +2,29 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$22, $$23] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$22(ASC), $$23(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$22, $$23] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$24, $$22, $$28][$$25, $$19, $$20] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$24, $$22, $$28] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$25, $$19, $$20] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$22, $$23] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- STREAM_SELECT |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$22(ASC), $$23(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$22, $$23] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$24, $$22, $$28][$$25, $$19, $$20] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$24, $$22, $$28] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$25, $$19, $$20] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/loj-super-key_02.plan b/asterix-app/src/test/resources/optimizerts/results/loj-super-key_02.plan
index 7b0bf69..3926a1a 100644
--- a/asterix-app/src/test/resources/optimizerts/results/loj-super-key_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/loj-super-key_02.plan
@@ -2,29 +2,30 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$22, $$23] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$22(ASC), $$23(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$25, $$22, $$23][$$24, $$19, $$28] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$19, $$28] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$22, $$23] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- STREAM_SELECT |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$22(ASC), $$23(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$25, $$22, $$23][$$24, $$19, $$28] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$19, $$28] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/nested_loj2.plan b/asterix-app/src/test/resources/optimizerts/results/nested_loj2.plan
index a203f14..c542f97 100644
--- a/asterix-app/src/test/resources/optimizerts/results/nested_loj2.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/nested_loj2.plan
@@ -2,43 +2,44 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$25] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- MICRO_PRE_CLUSTERED_GROUP_BY[$$23] |LOCAL|
- {
- -- AGGREGATE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- STREAM_SELECT |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$25(ASC), $$23(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$25] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$23][$$20] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$23] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$25][$$26] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$25] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- MICRO_PRE_CLUSTERED_GROUP_BY[$$23] |LOCAL|
+ {
+ -- AGGREGATE |LOCAL|
+ -- STREAM_SELECT |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- STREAM_SELECT |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$25(ASC), $$23(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$25] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$23][$$20] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$23] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$25][$$26] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$26] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$20] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$26] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$20] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/nested_loj3.plan b/asterix-app/src/test/resources/optimizerts/results/nested_loj3.plan
index 4c7844e..8f0100c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/nested_loj3.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/nested_loj3.plan
@@ -2,58 +2,59 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$42] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- MICRO_PRE_CLUSTERED_GROUP_BY[$$40] |LOCAL|
- {
- -- AGGREGATE |LOCAL|
- -- MICRO_PRE_CLUSTERED_GROUP_BY[$$37, $$38] |LOCAL|
- {
- -- AGGREGATE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- IN_MEMORY_STABLE_SORT [$$37(ASC), $$38(ASC)] |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- IN_MEMORY_STABLE_SORT [$$40(ASC), $$38(ASC)] |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$42(ASC), $$40(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$42] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$48, $$50][$$34, $$35] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$48, $$50] |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$40][$$37] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$40] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$42][$$43] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$43] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$37] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$42] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- MICRO_PRE_CLUSTERED_GROUP_BY[$$40] |LOCAL|
+ {
+ -- AGGREGATE |LOCAL|
+ -- MICRO_PRE_CLUSTERED_GROUP_BY[$$37, $$38] |LOCAL|
+ {
+ -- AGGREGATE |LOCAL|
+ -- STREAM_SELECT |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- IN_MEMORY_STABLE_SORT [$$37(ASC), $$38(ASC)] |LOCAL|
+ -- STREAM_SELECT |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- IN_MEMORY_STABLE_SORT [$$40(ASC), $$38(ASC)] |LOCAL|
+ -- STREAM_SELECT |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$42(ASC), $$40(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$42] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$48, $$50][$$34, $$35] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$48, $$50] |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$40][$$37] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$40] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$42][$$43] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$43] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$37] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/noncollocated.plan b/asterix-app/src/test/resources/optimizerts/results/noncollocated.plan
index 32a0b06..5e9c657 100644
--- a/asterix-app/src/test/resources/optimizerts/results/noncollocated.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/noncollocated.plan
@@ -2,19 +2,20 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$10][$$11] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$10] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$11] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$10][$$11] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$10] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$11] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orderby-desc-using-gby.plan b/asterix-app/src/test/resources/optimizerts/results/orderby-desc-using-gby.plan
index 671187a..3a6718f 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orderby-desc-using-gby.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orderby-desc-using-gby.plan
@@ -7,12 +7,13 @@
-- AGGREGATE |LOCAL|
-- NESTED_TUPLE_SOURCE |LOCAL|
}
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$9(DESC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$9(DESC)] |PARTITIONED|
-- HASH_PARTITION_EXCHANGE [$$9] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-aggreg.plan b/asterix-app/src/test/resources/optimizerts/results/orders-aggreg.plan
index df2a96e..a1f97f3 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-aggreg.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-aggreg.plan
@@ -9,16 +9,16 @@
-- NESTED_TUPLE_SOURCE |LOCAL|
}
-- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$24(ASC)] HASH:[$$24] |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$16] |LOCAL|
+ -- PRE_CLUSTERED_GROUP_BY[$$16] |PARTITIONED|
{
-- AGGREGATE |LOCAL|
-- NESTED_TUPLE_SOURCE |LOCAL|
}
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$16(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$16(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-composite-index-search.plan b/asterix-app/src/test/resources/optimizerts/results/orders-composite-index-search.plan
index a89ceb8..327253a 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-composite-index-search.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-composite-index-search.plan
@@ -2,14 +2,15 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$21(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$21(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive-open_01.plan b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive-open_01.plan
index 559f0d1..69d88b6 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive-open_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive-open_01.plan
@@ -5,14 +5,15 @@
-- STREAM_PROJECT |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$19(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$19(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive-open_02.plan b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive-open_02.plan
index 685d093..dd3990a 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive-open_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive-open_02.plan
@@ -2,17 +2,18 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$14(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$20(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$20(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive_01.plan b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive_01.plan
index 559f0d1..69d88b6 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive_01.plan
@@ -5,14 +5,15 @@
-- STREAM_PROJECT |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$19(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$19(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive_02.plan b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive_02.plan
index 685d093..dd3990a 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-conjunctive_02.plan
@@ -2,17 +2,18 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- SORT_MERGE_EXCHANGE [$$14(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$20(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$20(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-open.plan b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-open.plan
index 1397437..3a6ffad 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-index-search-open.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-index-search-open.plan
@@ -2,14 +2,15 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$13(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$13(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/orders-index-search.plan b/asterix-app/src/test/resources/optimizerts/results/orders-index-search.plan
index 1397437..3a6ffad 100644
--- a/asterix-app/src/test/resources/optimizerts/results/orders-index-search.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/orders-index-search.plan
@@ -2,14 +2,15 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$13(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$13(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/prim-idx-search-open.plan b/asterix-app/src/test/resources/optimizerts/results/prim-idx-search-open.plan
index 6cce309..6b5f842 100644
--- a/asterix-app/src/test/resources/optimizerts/results/prim-idx-search-open.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/prim-idx-search-open.plan
@@ -2,8 +2,9 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/prim-idx-search.plan b/asterix-app/src/test/resources/optimizerts/results/prim-idx-search.plan
index 6cce309..6b5f842 100644
--- a/asterix-app/src/test/resources/optimizerts/results/prim-idx-search.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/prim-idx-search.plan
@@ -2,8 +2,9 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/pull_select_above_eq_join.plan b/asterix-app/src/test/resources/optimizerts/results/pull_select_above_eq_join.plan
index a82190a..f242afd 100644
--- a/asterix-app/src/test/resources/optimizerts/results/pull_select_above_eq_join.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/pull_select_above_eq_join.plan
@@ -2,20 +2,22 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$18][$$19] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$18] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$18][$$19] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$18] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$19] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$19] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/push-project-through-group.plan b/asterix-app/src/test/resources/optimizerts/results/push-project-through-group.plan
index e74eaee..0431497 100644
--- a/asterix-app/src/test/resources/optimizerts/results/push-project-through-group.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/push-project-through-group.plan
@@ -2,30 +2,32 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$15] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$15(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$17][$$16] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$17] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$15] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- STREAM_SELECT |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$15(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$15] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$17][$$16] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$17] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$16] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/push_limit.plan b/asterix-app/src/test/resources/optimizerts/results/push_limit.plan
index 21d56c6..0169800 100644
--- a/asterix-app/src/test/resources/optimizerts/results/push_limit.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/push_limit.plan
@@ -2,12 +2,13 @@
-- STREAM_PROJECT |UNPARTITIONED|
-- ASSIGN |UNPARTITIONED|
-- STREAM_LIMIT |UNPARTITIONED|
- -- SORT_MERGE_EXCHANGE [$$9(ASC) ] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- SORT_MERGE_EXCHANGE [$$9(ASC) ] |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_LIMIT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_LIMIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/q1.plan b/asterix-app/src/test/resources/optimizerts/results/q1.plan
index d723324..82a1c28 100644
--- a/asterix-app/src/test/resources/optimizerts/results/q1.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/q1.plan
@@ -2,17 +2,19 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- SUBPLAN |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- STREAM_SELECT |LOCAL|
- -- UNNEST |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- SUBPLAN |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- STREAM_SELECT |LOCAL|
+ -- UNNEST |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/q2.plan b/asterix-app/src/test/resources/optimizerts/results/q2.plan
index 11489a7..c8eb368 100644
--- a/asterix-app/src/test/resources/optimizerts/results/q2.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/q2.plan
@@ -2,39 +2,43 @@
-- STREAM_PROJECT |UNPARTITIONED|
-- ASSIGN |UNPARTITIONED|
-- STREAM_LIMIT |UNPARTITIONED|
- -- SORT_MERGE_EXCHANGE [$$26(DESC) ] |PARTITIONED|
- -- STREAM_LIMIT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$26(DESC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$32] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- {
- -- AGGREGATE |LOCAL|
- -- MICRO_PRE_CLUSTERED_GROUP_BY[$$33] |LOCAL|
- {
- -- AGGREGATE |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
+ -- STREAM_PROJECT |PARTITIONED|
+ -- SORT_MERGE_EXCHANGE [$$26(DESC) ] |PARTITIONED|
+ -- STREAM_LIMIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$26(DESC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$32] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
-- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$32(ASC), $$33(ASC)] HASH:[$$32] |PARTITIONED|
- -- PRE_CLUSTERED_GROUP_BY[$$23, $$24] |LOCAL|
- {
- -- AGGREGATE |LOCAL|
+ }
+ {
+ -- AGGREGATE |LOCAL|
+ -- MICRO_PRE_CLUSTERED_GROUP_BY[$$33] |LOCAL|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
-- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$23(ASC), $$24(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- UNNEST |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ }
+ -- HASH_PARTITION_MERGE_EXCHANGE MERGE:[$$32(ASC), $$33(ASC)] HASH:[$$32] |PARTITIONED|
+ -- PRE_CLUSTERED_GROUP_BY[$$23, $$24] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$23(ASC), $$24(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- UNNEST |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/results/q3_shipping_priority.plan b/asterix-app/src/test/resources/optimizerts/results/q3_shipping_priority.plan
index 3f105d7..b96afc7 100644
--- a/asterix-app/src/test/resources/optimizerts/results/q3_shipping_priority.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/q3_shipping_priority.plan
@@ -2,50 +2,52 @@
-- STREAM_PROJECT |UNPARTITIONED|
-- ASSIGN |UNPARTITIONED|
-- STREAM_LIMIT |UNPARTITIONED|
- -- SORT_MERGE_EXCHANGE [$$46(DESC), $$4(ASC) ] |PARTITIONED|
- -- STREAM_LIMIT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$46(DESC), $$4(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EXTERNAL_GROUP_BY[$$56, $$57, $$58] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- HASH_PARTITION_EXCHANGE [$$56, $$57, $$58] |PARTITIONED|
- -- EXTERNAL_GROUP_BY[$$44, $$41, $$39] |LOCAL|
- {
- -- AGGREGATE |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$43][$$44] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$43] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$42][$$50] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$50] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- SORT_MERGE_EXCHANGE [$$46(DESC), $$4(ASC) ] |PARTITIONED|
+ -- STREAM_LIMIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$46(DESC), $$4(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EXTERNAL_GROUP_BY[$$56, $$57, $$58] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- HASH_PARTITION_EXCHANGE [$$56, $$57, $$58] |PARTITIONED|
+ -- EXTERNAL_GROUP_BY[$$44, $$41, $$39] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$43][$$44] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$43] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$42][$$50] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$44] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$50] |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$44] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/results/q5_local_supplier_volume.plan b/asterix-app/src/test/resources/optimizerts/results/q5_local_supplier_volume.plan
index ef68caf..5fca854 100644
--- a/asterix-app/src/test/resources/optimizerts/results/q5_local_supplier_volume.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/q5_local_supplier_volume.plan
@@ -1,80 +1,83 @@
-- SINK_WRITE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- SORT_MERGE_EXCHANGE [$$89(DESC) ] |PARTITIONED|
- -- STABLE_SORT [$$89(DESC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EXTERNAL_GROUP_BY[$$120] |PARTITIONED|
- {
- -- AGGREGATE |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- HASH_PARTITION_EXCHANGE [$$120] |PARTITIONED|
- -- EXTERNAL_GROUP_BY[$$94] |LOCAL|
- {
- -- AGGREGATE |LOCAL|
- -- NESTED_TUPLE_SOURCE |LOCAL|
- }
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$81, $$115][$$112, $$88] |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$112] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$82][$$83] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- SORT_MERGE_EXCHANGE [$$89(DESC) ] |PARTITIONED|
+ -- STABLE_SORT [$$89(DESC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EXTERNAL_GROUP_BY[$$120] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- HASH_PARTITION_EXCHANGE [$$120] |PARTITIONED|
+ -- EXTERNAL_GROUP_BY[$$94] |PARTITIONED|
+ {
+ -- AGGREGATE |LOCAL|
+ -- NESTED_TUPLE_SOURCE |LOCAL|
+ }
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$81, $$115][$$112, $$88] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$83] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$99][$$85] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$99] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$85] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$112] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$82][$$83] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$88][$$86] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$88] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$83] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$99][$$85] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$99] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$86] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- HYBRID_HASH_JOIN [$$92][$$87] |PARTITIONED|
- -- HASH_PARTITION_EXCHANGE [$$92] |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$85] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$88][$$86] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$88] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$86] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- HYBRID_HASH_JOIN [$$92][$$87] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$92] |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/optimizerts/results/record_access.plan b/asterix-app/src/test/resources/optimizerts/results/record_access.plan
index 70e2daa..413cf0c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/record_access.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/record_access.plan
@@ -1,4 +1,3 @@
-- SINK_WRITE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- EMPTY_TUPLE_SOURCE |UNPARTITIONED|
+ -- ASSIGN |UNPARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |UNPARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_01.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_01.plan
index 73968f0..6f311aa 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_01.plan
@@ -2,19 +2,22 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$20(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$20(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- RTREE_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- RTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_02.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_02.plan
index 73968f0..6f311aa 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_02.plan
@@ -2,19 +2,22 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$20(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$20(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- RTREE_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- RTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_03.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_03.plan
index 73968f0..6f311aa 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-index-join/spatial-intersect-point_03.plan
@@ -2,19 +2,22 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$20(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$20(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- RTREE_SEARCH |PARTITIONED|
- -- BROADCAST_EXCHANGE |PARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- RTREE_SEARCH |PARTITIONED|
+ -- BROADCAST_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan
index 180a756..dc9b260 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index-open.plan
@@ -2,15 +2,17 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$22(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$22(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- RTREE_SEARCH |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- RTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan
index 180a756..dc9b260 100644
--- a/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/rtree-secondary-index.plan
@@ -2,15 +2,17 @@
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$22(ASC)] |LOCAL|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- STABLE_SORT [$$22(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- RTREE_SEARCH |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- RTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/scan-delete-all.plan b/asterix-app/src/test/resources/optimizerts/results/scan-delete-all.plan
index c000d63..395fec7 100644
--- a/asterix-app/src/test/resources/optimizerts/results/scan-delete-all.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/scan-delete-all.plan
@@ -1,13 +1,14 @@
-- SINK |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INSERT_DELETE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$19(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$19] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$19(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$19] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/scan-delete-rtree-secondary-index.plan b/asterix-app/src/test/resources/optimizerts/results/scan-delete-rtree-secondary-index.plan
index ebcef5d..79daefd 100644
--- a/asterix-app/src/test/resources/optimizerts/results/scan-delete-rtree-secondary-index.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/scan-delete-rtree-secondary-index.plan
@@ -1,21 +1,25 @@
-- SINK |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- INDEX_INSERT_DELETE |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$29(ASC), $$30(ASC), $$31(ASC), $$32(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INSERT_DELETE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$14(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- BTREE_SEARCH |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INDEX_INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$29(ASC), $$30(ASC), $$31(ASC), $$32(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$14(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$14] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- BTREE_SEARCH |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/scan-delete.plan b/asterix-app/src/test/resources/optimizerts/results/scan-delete.plan
index 57aafc3..0e7cf78 100644
--- a/asterix-app/src/test/resources/optimizerts/results/scan-delete.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/scan-delete.plan
@@ -1,15 +1,16 @@
-- SINK |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INSERT_DELETE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$21(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$21] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$21(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$21] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/scan-insert-secondary-index.plan b/asterix-app/src/test/resources/optimizerts/results/scan-insert-secondary-index.plan
index 765770e..650f512 100644
--- a/asterix-app/src/test/resources/optimizerts/results/scan-insert-secondary-index.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/scan-insert-secondary-index.plan
@@ -1,27 +1,30 @@
-- SINK |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- INDEX_INSERT_DELETE |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$11(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- INDEX_INSERT_DELETE |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$10(ASC)] |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- ASSIGN |UNPARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INDEX_INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$11(ASC)] |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INDEX_INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$10(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INSERT_DELETE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$6(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$6] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$6(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$6] |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/scan-insert.plan b/asterix-app/src/test/resources/optimizerts/results/scan-insert.plan
index 2fe55a5..5cfcdf3 100644
--- a/asterix-app/src/test/resources/optimizerts/results/scan-insert.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/scan-insert.plan
@@ -1,14 +1,16 @@
-- SINK |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- INSERT_DELETE |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$6(ASC)] |LOCAL|
- -- HASH_PARTITION_EXCHANGE [$$6] |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- INSERT_DELETE |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$6(ASC)] |PARTITIONED|
+ -- HASH_PARTITION_EXCHANGE [$$6] |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_01.plan
index 553a531..998e003 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_01.plan
@@ -3,8 +3,10 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_02.plan
index 553a531..998e003 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_02.plan
@@ -3,8 +3,10 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_03.plan
index 553a531..998e003 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_03.plan
@@ -3,8 +3,10 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_04.plan
index 553a531..998e003 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_04.plan
@@ -3,8 +3,10 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_05.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_05.plan
index 467de52..ba1c86c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_05.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_05.plan
@@ -1,9 +1,10 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_06.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_06.plan
index 467de52..ba1c86c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_06.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_06.plan
@@ -1,9 +1,10 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_07.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_07.plan
index 467de52..ba1c86c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_07.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_07.plan
@@ -1,9 +1,10 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_08.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_08.plan
index 467de52..ba1c86c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_08.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-let-to-edit-distance-check_08.plan
@@ -1,9 +1,10 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_01.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_01.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_02.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_02.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_03.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_03.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_04.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_04.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_05.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_05.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_05.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_05.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_06.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_06.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_06.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_06.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_07.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_07.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_07.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_07.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_08.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_08.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_08.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/edit-distance-to-edit-distance-check_08.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-edit-distance-check.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-edit-distance-check.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-edit-distance-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-edit-distance-check.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-jaccard-check.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-jaccard-check.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-jaccard-check.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/fuzzyeq-to-jaccard-check.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_01.plan
index 553a531..998e003 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_01.plan
@@ -3,8 +3,10 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_02.plan
index 553a531..998e003 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_02.plan
@@ -3,8 +3,10 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_03.plan
index 553a531..998e003 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_03.plan
@@ -3,8 +3,10 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_04.plan
index 553a531..998e003 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_04.plan
@@ -3,8 +3,10 @@
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- STREAM_SELECT |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_05.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_05.plan
index 467de52..ba1c86c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_05.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_05.plan
@@ -1,9 +1,10 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_06.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_06.plan
index 467de52..ba1c86c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_06.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_06.plan
@@ -1,9 +1,10 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_07.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_07.plan
index 467de52..ba1c86c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_07.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_07.plan
@@ -1,9 +1,10 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_08.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_08.plan
index 467de52..ba1c86c 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_08.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-let-to-jaccard-check_08.plan
@@ -1,9 +1,10 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_01.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_01.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_01.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_01.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_02.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_02.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_02.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_03.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_03.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_03.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_03.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_04.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_04.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_04.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_04.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_05.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_05.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_05.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_05.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_06.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_06.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_06.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_06.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_07.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_07.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_07.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_07.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_08.plan b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_08.plan
index 4189498..1924060 100644
--- a/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_08.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/similarity/jaccard-to-jaccard-check_08.plan
@@ -1,7 +1,7 @@
-- SINK_WRITE |PARTITIONED|
-- RANDOM_MERGE_EXCHANGE |PARTITIONED|
- -- STREAM_PROJECT |PARTITIONED|
- -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_SELECT |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/sort-cust.plan b/asterix-app/src/test/resources/optimizerts/results/sort-cust.plan
index f790019..5f29f85 100644
--- a/asterix-app/src/test/resources/optimizerts/results/sort-cust.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/sort-cust.plan
@@ -3,13 +3,14 @@
-- ASSIGN |UNPARTITIONED|
-- STREAM_LIMIT |UNPARTITIONED|
-- SORT_MERGE_EXCHANGE [$$7(ASC) ] |PARTITIONED|
- -- STREAM_LIMIT |LOCAL|
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$7(ASC)] |LOCAL|
+ -- STREAM_LIMIT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$7(ASC)] |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/unnest-to-join_02.plan b/asterix-app/src/test/resources/optimizerts/results/unnest-to-join_02.plan
index 1f56c4f..bd2eb3d 100644
--- a/asterix-app/src/test/resources/optimizerts/results/unnest-to-join_02.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/unnest-to-join_02.plan
@@ -1,11 +1,10 @@
-- SINK_WRITE |UNPARTITIONED|
- -- STREAM_PROJECT |UNPARTITIONED|
- -- AGGREGATE |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- HYBRID_HASH_JOIN [$$0][$$1] |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- UNNEST |UNPARTITIONED|
- -- EMPTY_TUPLE_SOURCE |UNPARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
- -- UNNEST |UNPARTITIONED|
- -- EMPTY_TUPLE_SOURCE |UNPARTITIONED|
+ -- AGGREGATE |UNPARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
+ -- HYBRID_HASH_JOIN [$$0][$$1] |UNPARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
+ -- UNNEST |UNPARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |UNPARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |UNPARTITIONED|
+ -- UNNEST |UNPARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |UNPARTITIONED|
diff --git a/asterix-app/src/test/resources/optimizerts/results/unnest_list_in_subplan.plan b/asterix-app/src/test/resources/optimizerts/results/unnest_list_in_subplan.plan
index 531ce61..7218717 100644
--- a/asterix-app/src/test/resources/optimizerts/results/unnest_list_in_subplan.plan
+++ b/asterix-app/src/test/resources/optimizerts/results/unnest_list_in_subplan.plan
@@ -9,8 +9,8 @@
-- STREAM_SELECT |LOCAL|
-- NESTED_TUPLE_SOURCE |LOCAL|
}
- -- ONE_TO_ONE_EXCHANGE |LOCAL|
- -- STABLE_SORT [$$20(ASC), $$18(ASC)] |LOCAL|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- STABLE_SORT [$$20(ASC), $$18(ASC)] |PARTITIONED|
-- HASH_PARTITION_EXCHANGE [$$20] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
@@ -18,15 +18,16 @@
-- HASH_PARTITION_EXCHANGE [$$3] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- UNNEST |PARTITIONED|
- -- ASSIGN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- DATASOURCE_SCAN |PARTITIONED|
- -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- STREAM_PROJECT |PARTITIONED|
+ -- ASSIGN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- DATASOURCE_SCAN |PARTITIONED|
+ -- ONE_TO_ONE_EXCHANGE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
-- HASH_PARTITION_EXCHANGE [$$22] |PARTITIONED|
-- STREAM_PROJECT |PARTITIONED|
-- ASSIGN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
-- DATASOURCE_SCAN |PARTITIONED|
-- ONE_TO_ONE_EXCHANGE |PARTITIONED|
- -- EMPTY_TUPLE_SOURCE |PARTITIONED|
+ -- EMPTY_TUPLE_SOURCE |PARTITIONED|
\ No newline at end of file
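
Note on the plan changes above (not part of the patch): the revised expected plans share two patterns. An additional STREAM_PROJECT now appears directly above each DATASOURCE_SCAN, the relative order of STREAM_SELECT and STREAM_PROJECT near the top of several pipelines is swapped, and operators such as STABLE_SORT and STREAM_LIMIT that were previously annotated LOCAL are now PARTITIONED. As a minimal sketch with hypothetical dataverse, dataset, and field names, a filter-and-project query of the following shape is the kind that compiles to the select/project/scan pipelines shown in these plans:

    use dataverse test;

    for $c in dataset('Customers')
    where $c.age > 21
    return $c.name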
diff --git a/asterix-app/src/test/resources/runtimets/queries/dml/query-issue205.aql b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue205.aql
new file mode 100644
index 0000000..e0a0695
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/dml/query-issue205.aql
@@ -0,0 +1,33 @@
+/*
+ * Description : This test case is to verify the fix for issue205
+ : https://code.google.com/p/asterixdb/issues/detail?id=205
+ * Expected Res : Success
+ * Date : 26th November 2012
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type EmployeeStat as open {
+ age: int32,
+ salary:int32
+}
+
+create type EmployeeType as closed {
+ id:string,
+ stat:EmployeeStat,
+ deptCode:int32
+}
+
+create dataset Employees(EmployeeType)
+ partitioned by key id;
+
+insert into dataset Employees({"id":"1234", "stat":{ "age":50, "salary":120000}, "deptCode":32 });
+insert into dataset Employees({"id":"5678", "stat":{ "age":40, "salary":100000}, "deptCode":16 });
+
+delete $l from dataset Employees where $l.id = "1234";
+
+write output to nc1:"rttest/dml_query-issue205.adm";
+for $l in dataset('Employees')
+return $l
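
Usage note (a hedged sketch, not part of the test file above): the same dataset can also be probed for the deleted key to confirm that the delete took effect, using only constructs already shown in this test:

    for $l in dataset('Employees')
    where $l.id = "1234"
    return $l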
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql
new file mode 100644
index 0000000..d4dcd38
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_01.aql
@@ -0,0 +1,27 @@
+/*
+ * Description : Create a feed dataset and verify contents in Metadata
+ * Expected Res : Success
+ * Date : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+partitioned by key id;
+
+write output to nc1:"rttest/feeds_feeds_01.adm";
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='feeds' and $x.DatasetName='TweetFeed'
+return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql
new file mode 100644
index 0000000..3129d63
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02.aql
@@ -0,0 +1,31 @@
+/*
+ * Description : Create a feed dataset that uses the feed simulator adapter.
+ Begin ingestion and verify contents of the dataset post completion.
+ * Expected Res : Success
+ * Date : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+partitioned by key id;
+
+begin feed TweetFeed;
+
+write output to nc1:"rttest/feeds_feeds_02.adm";
+
+for $x in dataset('TweetFeed')
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql
new file mode 100644
index 0000000..a4b22d0
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_03.aql
@@ -0,0 +1,34 @@
+/*
+ * Description : Create a feed dataset with an associated function and verify contents in Metadata
+ * Expected Res : Success
+ * Date : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create function feed_processor($x) {
+$x
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+apply function feed_processor@1
+partitioned by key id;
+
+write output to nc1:"rttest/feeds_feeds_03.adm";
+
+for $x in dataset('Metadata.Dataset')
+where $x.DataverseName='feeds' and $x.DatasetName='TweetFeed'
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql
new file mode 100644
index 0000000..c38cfd2
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04.aql
@@ -0,0 +1,32 @@
+/*
+ * Description : Create a feed dataset that uses the feed simulator adapter.
+ The feed simulator simulates feed from a file in the HDFS.
+ Begin ingestion and verify contents of the dataset post completion.
+ * Expected Res : Success
+ * Date : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="hdfs"),("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/obamatweets.adm"),("format"="adm"),("input-format"="text-input-format"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+partitioned by key id;
+
+begin feed TweetFeed;
+
+write output to nc1:"rttest/feeds_feeds_04.adm";
+
+for $x in dataset('TweetFeed')
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql
new file mode 100644
index 0000000..a7dc4fa
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds.aql
@@ -0,0 +1,31 @@
+/*
+ * Description : Create a feed dataset that uses the feed simulator adapter.
+ Begin ingestion using a fully qualified name and verify contents of the dataset post completion.
+ * Expected Res : Success
+ * Date : 24th Dec 2012
+ */
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TweetType as closed {
+ id: string,
+ username : string,
+ location : string,
+ text : string,
+ timestamp : string
+}
+
+create feed dataset TweetFeed(TweetType)
+using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"))
+partitioned by key id;
+
+begin feed feeds.TweetFeed;
+
+write output to nc1:"rttest/feeds_issue_230_feeds.adm";
+
+for $x in dataset('TweetFeed')
+return $x
+
+drop dataverse feeds;
diff --git a/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_2.aql b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_2.aql
new file mode 100644
index 0000000..7bf7d5f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/fuzzyjoin/dblp-aqlplus_2.aql
@@ -0,0 +1,32 @@
+/*
+ * Description : Tests that a proper error message is returned for this scenario.
+ * Since we cannot statically know the type of the field 'title', the FuzzyEqRule
+ * cannot auto-inject a tokenizer, and hence we expect an error saying that we cannot
+ * scan over a string as if it were a collection.
+ * Guards against regression to issue 207.
+ * Success : Yes
+ */
+
+drop dataverse fuzzyjoin if exists;
+create dataverse fuzzyjoin;
+use dataverse fuzzyjoin;
+
+create type DBLPType as open {
+ id: int32
+}
+
+create dataset DBLP(DBLPType) partitioned by key id;
+
+load dataset DBLP
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/dblp-small/dblp-small.adm"),("format"="adm"));
+
+write output to nc1:'rttest/fuzzyjoin_dblp-aqlplus_2.adm';
+
+set simthreshold '.5f';
+
+for $dblp in dataset('DBLP')
+for $dblp2 in dataset('DBLP')
+where $dblp.title ~= $dblp2.title and $dblp.id < $dblp2.id
+order by $dblp.id, $dblp2.id
+return {'dblp': $dblp, 'dblp2': $dblp2}
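
For contrast, a hedged sketch (hypothetical type and dataset names, not part of the test above): if the 'title' field were declared in the type, its type would be known statically, so the fuzzy-equality predicate could be compiled with a tokenizer instead of raising the error this test expects:

    create type DBLPTypedType as open {
      id: int32,
      title: string
    }

    create dataset DBLPTyped(DBLPTypedType) partitioned by key id;

    set simthreshold '.5f';

    for $d in dataset('DBLPTyped')
    for $d2 in dataset('DBLPTyped')
    where $d.title ~= $d2.title and $d.id < $d2.id
    return {'d': $d, 'd2': $d2}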
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql
new file mode 100644
index 0000000..7a0494f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_02.aql
@@ -0,0 +1,26 @@
+/*
+* Description : Create an external dataset that contains tuples, the lines from a (*sequence*) file in HDFS.
+ Perform a word-count over the data in the dataset.
+* Expected Res : Success
+* Date : 7th Jan 2013
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as closed {
+ content: string
+};
+
+create external dataset TextDataset(LineType)
+using "hdfs"
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/textFileS"),("input-format"="sequence-input-format"),("format"="delimited-text"),("delimiter"="."));
+
+write output to nc1:"rttest/hdfs_hdfs_02.adm";
+
+for $line in dataset('TextDataset')
+let $tokens := word-tokens($line.content)
+for $token in $tokens
+group by $tok := $token with $token
+order by $tok
+return { "word": $tok, "count": count($token) }
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql
new file mode 100644
index 0000000..fc5b3ab
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/hdfs_03.aql
@@ -0,0 +1,28 @@
+/*
+* Description : Create an external dataset that contains tuples, the lines from a large (35kb) text file in HDFS.
+                 The input file is large enough to guarantee that the number of bytes exceeds the internal buffer size of 8192.
+                 This causes a record to span across buffer boundaries.
+ Perform a word-count over the data in the dataset.
+* Expected Res : Success
+* Date : 7th Jan 2013
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as closed {
+ content: string
+};
+
+create external dataset TextDataset(LineType)
+using "hdfs"
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/large_text"),("input-format"="text-input-format"),("format"="delimited-text"),("delimiter"="."));
+
+write output to nc1:"rttest/hdfs_hdfs_03.adm";
+
+for $line in dataset('TextDataset')
+let $tokens := word-tokens($line.content)
+for $token in $tokens
+group by $tok := $token with $token
+order by $tok
+return { "word": $tok, "count": count($token) }
diff --git a/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql b/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql
new file mode 100644
index 0000000..c2a0963
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/hdfs/issue_245_hdfs.aql
@@ -0,0 +1,23 @@
+/*
+* Description : Create an external dataset that contains tuples, the lines from a file in HDFS.
+ Iterate over the contained tuples.
+* Expected Res : Success
+* Issue : 245
+* Date : 7th Jan 2013
+*/
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type LineType as closed {
+ line: string
+};
+
+create external dataset TextDataset(LineType)
+using "hdfs"
+(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/asterix_info.txt"),("input-format"="text-input-format"),("format"="delimited-text"),("delimiter"="."));
+
+write output to nc1:"rttest/hdfs_issue_245_hdfs.adm";
+
+for $x in dataset('TextDataset')
+return $x
diff --git a/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql b/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql
new file mode 100644
index 0000000..9b25210
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/load/issue14_query.aql
@@ -0,0 +1,26 @@
+/*
+ * Description : Create and load a dataset but with an unspecified data format.
+ * Expected Res : Failure
+ * Date : 16 Jan 2012
+ */
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type Schema as closed{
+id: int32,
+age: int32,
+name: string
+}
+
+create dataset onektup(Schema)
+partitioned by key id;
+
+load dataset onektup
+using "localfs"(("path"="nc1:///tmp/one.adm"));
+
+write output to nc1:"/tmp/foo.adm";
+
+for $l in dataset('onektup')
+return $l
+
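
Since this test expects a failure because no data format is specified, a hedged sketch of the corrected statement (assuming the same "localfs" adapter and path) would simply add an explicit format parameter, as the other load statements in this patch do:

    load dataset onektup
    using "localfs"(("path"="nc1:///tmp/one.adm"),("format"="adm"));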
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue134.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue134.aql
new file mode 100644
index 0000000..62a9ed8
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue134.aql
@@ -0,0 +1,11 @@
+/*
+ * Description : This test case is to verify the fix for issue134
+ : https://code.google.com/p/asterixdb/issues/detail?id=134
+ * Expected Res : Success
+ * Date : 26th November 2012
+ */
+
+write output to nc1:"rttest/open-closed_query-issue134.adm";
+
+let $a:=true
+return {{[1,2,3,4,5],[6,5,3,8,9],[44,22,66,-1,0,99.9]}}
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue166.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue166.aql
new file mode 100644
index 0000000..aa0d13b
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue166.aql
@@ -0,0 +1,11 @@
+/*
+ * Description : This test case is to verify the fix for issue166
+ : https://code.google.com/p/asterixdb/issues/detail?id=166
+ * Expected Res : Success
+ * Date : 26th November 2012
+ */
+
+write output to nc1:"rttest/open-closed_query-issue166.adm";
+
+let $a := [[1,2,3],[4,5,6,7]]
+return $a[1]
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue208.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue208.aql
new file mode 100644
index 0000000..e46286c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue208.aql
@@ -0,0 +1,45 @@
+/*
+ * Description : This test case is to verify the fix for issue208
+ : https://code.google.com/p/asterixdb/issues/detail?id=208
+ * Expected Res : Success
+ * Date : 26th November 2012
+ */
+
+drop dataverse OpenSocialNetworkData if exists;
+create dataverse OpenSocialNetworkData;
+
+use dataverse OpenSocialNetworkData;
+
+create type TwitterUserType as open {
+screen-name: string,
+lang: string,
+friends_count: int32,
+statuses_count: int32,
+name: string,
+followers_count: int32
+}
+
+create type TweetMessageType as open {
+tweetid: string,
+tweetid-copy: string,
+send-time-copy: datetime
+}
+
+create dataset TweetMessages(TweetMessageType)
+partitioned by key tweetid;
+
+load dataset TweetMessages
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/twitter/tw_messages.adm"),("format"="adm"));
+
+write output to nc1:"rttest/open-closed_query-issue208.adm";
+for $t in dataset('TweetMessages')
+where $t.send-time >= datetime('2005-04-13T17:17:22') and
+$t.send-time <= datetime('2011-04-13T17:18:22')
+group by $uid := $t.user.screen-name with $t
+order by $uid
+return {
+ "user": $uid,
+ "count": count($t)
+}
+
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue236.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue236.aql
new file mode 100644
index 0000000..3d8ab69
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue236.aql
@@ -0,0 +1,52 @@
+/*
+ * Description : This test case is to verify the fix for issue236
+ : https://code.google.com/p/asterixdb/issues/detail?id=236
+ * Expected Res : Success
+ * Date : 20 Dec. 2012
+ */
+
+drop dataverse SocialNetworkData if exists;
+
+create dataverse SocialNetworkData;
+use dataverse SocialNetworkData;
+
+create type TwitterUserType as open {
+screen-name: string,
+lang: string,
+friends_count: int32,
+statuses_count: int32,
+name: string,
+followers_count: int32
+}
+
+create type TweetMessageType as closed {
+tweetid: string,
+tweetid-copy: string,
+user: TwitterUserType,
+sender-location: point?,
+send-time: datetime,
+send-time-copy: datetime,
+referred-topics: {{ string }},
+message-text: string
+}
+
+create dataset TweetMessages(TweetMessageType)
+partitioned by key tweetid;
+
+
+insert into dataset TweetMessages(
+{
+"tweetid": "1111387810",
+"tweetid-copy": "1111387810",
+"user": { "screen-name": "TonyNapier#786", "lang": "en", "friends_count": 4241366,
+"statuses_count": 97, "name": "Tony Napier", "followers_count": 5984113 },
+"sender-location": point("29.24,78.35"),
+"send-time": datetime("2011-11-24T14:24:51.000Z"),
+"send-time-copy": datetime("2011-11-24T14:24:51.000Z"),
+"referred-topics": {{ "sprint", "wireless" }},
+"message-text": " love sprint its wireless is mind-blowing:)"
+});
+
+write output to nc1:"rttest/open-closed_query-issue236.adm";
+for $r in dataset('TweetMessages')
+return $r
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue29.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue29.aql
new file mode 100644
index 0000000..8b778ba
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue29.aql
@@ -0,0 +1,70 @@
+/*
+ * Description : This test case is to verify the fix for issue29
+ : https://code.google.com/p/asterixdb/issues/detail?id=29
+ * Expected Res : Success
+ * Date : 26th November 2012
+ */
+
+write output to nc1:"rttest/open-closed_query-issue29.adm";
+
+let $tweets :=
+{{
+ {
+ "tweetid": "1023",
+ "user": {
+ "screen-name": "dflynn24",
+ "lang": "en",
+ "friends_count": 46,
+ "statuses_count": 987,
+ "name": "danielle flynn",
+ "followers_count": 47
+ },
+ "sender-location": "40.904177,-72.958996",
+ "send-time": "2010-02-21T11:56:02-05:00",
+ "referred-topics": {{ "verizon" }},
+ "message-text": "i need a #verizon phone like nowwwww! :("
+ },
+ {
+ "tweetid": "1024",
+ "user": {
+ "screen-name": "miriamorous",
+ "lang": "en",
+ "friends_count": 69,
+ "statuses_count": 1068,
+ "name": "Miriam Songco",
+ "followers_count": 78
+ },
+ "send-time": "2010-02-21T11:11:43-08:00",
+ "referred-topics": {{ "commercials", "verizon", "att" }},
+ "message-text": "#verizon & #att #commercials, so competitive"
+ },
+ {
+ "tweetid": "1025",
+ "user": {
+ "screen-name": "dj33",
+ "lang": "en",
+ "friends_count": 96,
+ "statuses_count": 1696,
+ "name": "Don Jango",
+ "followers_count": 22
+ },
+ "send-time": "2010-02-21T12:38:44-05:00",
+ "referred-topics": {{ "charlotte" }},
+ "message-text": "Chillin at dca waiting for 900am flight to #charlotte and from there to providenciales"
+ },
+ {
+ "tweetid": "1026",
+ "user": {
+ "screen-name": "reallyleila",
+ "lang": "en",
+ "friends_count": 106,
+ "statuses_count": 107,
+ "name": "Leila Samii",
+ "followers_count": 52
+ },
+ "send-time": "2010-02-21T21:31:57-06:00",
+ "referred-topics": {{ "verizon", "at&t", "iphone" }},
+ "message-text": "I think a switch from #verizon to #at&t may be in my near future... my smartphone is like a land line compared to the #iphone!"
+ }
+}}
+return $tweets
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue55-1.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue55-1.aql
new file mode 100644
index 0000000..11b75d1
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue55-1.aql
@@ -0,0 +1,13 @@
+/*
+ * Description : This test case is to verify the fix for issue55 query 1
+ : https://code.google.com/p/asterixdb/issues/detail?id=55
+ * Expected Res : Success
+ * Date : 26th November 2012
+ */
+
+write output to nc1:"rttest/open-closed_query-issue55-1.adm";
+
+let $l := [1.1f, 1.0f, 1.2f, 0.9, 1.3, 1, 2]
+for $i in $l
+for $j in $l
+return [$i, $j, "=", $i = $j, "<", $i < $j, "<=", $i <= $j, ">", $i > $j, ">=", $i >= $j]
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue55.aql b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue55.aql
new file mode 100644
index 0000000..b4e4572
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/open-closed/query-issue55.aql
@@ -0,0 +1,11 @@
+/*
+ * Description : This test case is to verify the fix for issue55 query 2
+ : https://code.google.com/p/asterixdb/issues/detail?id=55
+ * Expected Res : Success
+ * Date : 26th November 2012
+ */
+
+write output to nc1:"rttest/open-closed_query-issue55.adm";
+
+for $x in [[1,3],[4,5,2],[-1,-3,0],["a"]]
+return $x
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/records/field-access-on-open-field.aql b/asterix-app/src/test/resources/runtimets/queries/records/field-access-on-open-field.aql
new file mode 100644
index 0000000..2592c67
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/records/field-access-on-open-field.aql
@@ -0,0 +1,24 @@
+/*
+ * Description : Tests whether a field access on an open field (statically of type ANY) succeeds.
+ * Guards against regression to issue 207.
+ * Success : Yes
+ */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type TestType as open {
+ id : int32,
+ name : string
+}
+
+create dataset testds(TestType) partitioned by key id;
+
+insert into dataset testds({"id": 123, "name": "John Doe", "address": { "zip": 92617} });
+
+write output to nc1:"rttest/records_field-access-on-open-field.adm";
+
+for $l in dataset("testds")
+let $a := $l.address
+return $a.zip
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql
new file mode 100644
index 0000000..ff04a36
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_1.aql
@@ -0,0 +1,35 @@
+/*
+* Description : Create a dataset and load it from two file splits
+ Include whitespace between the elements in the comma-separated list of file paths.
+* Expected Res : Success
+* Issue : 238
+* Date : 7th Jan 2013
+*/
+
+/* scan and print an ADM file as a dataset of closed records */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int32,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset DBLPadm(DBLPType)
+ partitioned by key id;
+
+// drop dataset DBLPadm;
+load dataset DBLPadm
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/dblp-small/part-00000.adm, nc1://data/dblp-small/part-00001.adm"),("format"="adm"));
+
+write output to nc1:"rttest/scan_issue238_query_1.adm";
+
+for $paper in dataset('DBLPadm')
+order by $paper.id
+return $paper
diff --git a/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql
new file mode 100644
index 0000000..297e2f2
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/scan/issue238_query_2.aql
@@ -0,0 +1,36 @@
+/*
+* Description : Create a dataset and load it from two file splits
+ Include newline between the elements in the comma-separated list of file paths.
+* Expected Res : Success
+* Issue : 238
+* Date : 7th Jan 2013
+*/
+
+/* scan and print an ADM file as a dataset of closed records */
+
+drop dataverse test if exists;
+create dataverse test;
+use dataverse test;
+
+create type DBLPType as closed {
+ id: int32,
+ dblpid: string,
+ title: string,
+ authors: string,
+ misc: string
+}
+
+create dataset DBLPadm(DBLPType)
+ partitioned by key id;
+
+// drop dataset DBLPadm;
+load dataset DBLPadm
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/dblp-small/part-00000.adm,
+ nc1://data/dblp-small/part-00001.adm"),("format"="adm"));
+
+write output to nc1:"rttest/scan_issue238_query_2.adm";
+
+for $paper in dataset('DBLPadm')
+order by $paper.id
+return $paper
diff --git a/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item_int64.aql b/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item_int64.aql
new file mode 100644
index 0000000..cfac014
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/tpch/q10_returned_item_int64.aql
@@ -0,0 +1,195 @@
+drop dataverse tpch if exists;
+create dataverse tpch;
+
+use dataverse tpch;
+
+create type LineItemType as closed {
+ l_orderkey: int64,
+ l_partkey: int64,
+ l_suppkey: int64,
+ l_linenumber: int64,
+ l_quantity: int64,
+ l_extendedprice: double,
+ l_discount: double,
+ l_tax: double,
+ l_returnflag: string,
+ l_linestatus: string,
+ l_shipdate: string,
+ l_commitdate: string,
+ l_receiptdate: string,
+ l_shipinstruct: string,
+ l_shipmode: string,
+ l_comment: string
+}
+
+create type OrderType as closed {
+ o_orderkey: int64,
+ o_custkey: int64,
+ o_orderstatus: string,
+ o_totalprice: double,
+ o_orderdate: string,
+ o_orderpriority: string,
+ o_clerk: string,
+ o_shippriority: int64,
+ o_comment: string
+}
+
+create type CustomerType as closed {
+ c_custkey: int64,
+ c_name: string,
+ c_address: string,
+ c_nationkey: int64,
+ c_phone: string,
+ c_acctbal: double,
+ c_mktsegment: string,
+ c_comment: string
+}
+
+create type SupplierType as closed {
+ s_suppkey: int64,
+ s_name: string,
+ s_address: string,
+ s_nationkey: int64,
+ s_phone: string,
+ s_acctbal: double,
+ s_comment: string
+}
+
+create type NationType as closed {
+ n_nationkey: int64,
+ n_name: string,
+ n_regionkey: int64,
+ n_comment: string
+}
+
+create type RegionType as closed {
+ r_regionkey: int64,
+ r_name: string,
+ r_comment: string
+}
+
+create type PartType as closed {
+ p_partkey: int64,
+ p_name: string,
+ p_mfgr: string,
+ p_brand: string,
+ p_type: string,
+ p_size: int64,
+ p_container: string,
+ p_retailprice: double,
+ p_comment: string
+}
+
+create type PartSuppType as closed {
+ ps_partkey: int64,
+ ps_suppkey: int64,
+ ps_availqty: int64,
+ ps_supplycost: double,
+ ps_comment: string
+}
+
+create dataset LineItem(LineItemType)
+ partitioned by key l_orderkey, l_linenumber;
+create dataset Orders(OrderType)
+ partitioned by key o_orderkey;
+create dataset Supplier(SupplierType)
+ partitioned by key s_suppkey;
+create dataset Region(RegionType)
+ partitioned by key r_regionkey;
+create dataset Nation(NationType)
+ partitioned by key n_nationkey;
+create dataset Part(PartType)
+ partitioned by key p_partkey;
+create dataset Partsupp(PartSuppType)
+ partitioned by key ps_partkey, ps_suppkey;
+create dataset Customer(CustomerType)
+ partitioned by key c_custkey;
+
+load dataset LineItem
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Orders
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/orders.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Supplier
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/supplier.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Region
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/region.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Nation
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/nation.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Part
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/part.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Partsupp
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/partsupp.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+load dataset Customer
+using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="nc1://data/tpch0.001/customer.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
+
+write output to nc1:"rttest/tpch_q10_returned_item_int64.adm";
+
+
+for $locn in (
+for $l in dataset('LineItem')
+for $ocn in (
+ for $o in dataset('Orders')
+ for $c in dataset('Customer')
+ where $c.c_custkey = $o.o_custkey and $o.o_orderdate >= '1993-10-01' and $o.o_orderdate < '1994-01-01'
+ for $n in dataset('Nation')
+ where $c.c_nationkey = $n.n_nationkey
+ return {
+ "c_custkey": $c.c_custkey,
+ "c_name": $c.c_name,
+ "c_acctbal": $c.c_acctbal,
+ "n_name": $n.n_name,
+ "c_address": $c.c_address,
+ "c_phone": $c.c_phone,
+ "c_comment": $c.c_comment,
+ "o_orderkey": $o.o_orderkey
+ }
+)
+where
+ $l.l_orderkey = $ocn.o_orderkey and $l.l_returnflag = 'R'
+ return {
+ "c_custkey": $ocn.c_custkey,
+ "c_name": $ocn.c_name,
+ "c_acctbal": $ocn.c_acctbal,
+ "n_name": $ocn.n_name,
+ "c_address": $ocn.c_address,
+ "c_phone": $ocn.c_phone,
+ "c_comment": $ocn.c_comment,
+ "l_extendedprice": $l.l_extendedprice,
+ "l_discount": $l.l_discount
+ }
+)
+group by $c_custkey:=$locn.c_custkey,
+ $c_name:=$locn.c_name,
+ $c_acctbal:=$locn.c_acctbal, $c_phone:=$locn.c_phone,
+ $n_name:=$locn.n_name, $c_address:=$locn.c_address, $c_comment:=$locn.c_comment
+ with $locn
+let $revenue := sum(for $i in $locn return $i.l_extendedprice * (1 - $i.l_discount))
+order by $revenue desc
+limit 20
+return {
+ "c_custkey": $c_custkey,
+ "c_name": $c_name,
+ "revenue": $revenue,
+ "c_acctbal": $c_acctbal,
+ "n_name": $n_name,
+ "c_address": $c_address,
+ "c_phone": $c_phone,
+ "c_comment": $c_comment
+}
+
+
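
The query above nests two FLWOR blocks before grouping; the aggregation it relies on is the "group by ... with" construct followed by a sum over the grouped variable. A minimal hedged sketch of just that pattern, against the LineItem dataset defined in this file:

    for $l in dataset('LineItem')
    group by $flag := $l.l_returnflag with $l
    let $revenue := sum(for $i in $l return $i.l_extendedprice * (1 - $i.l_discount))
    order by $revenue desc
    return { "l_returnflag": $flag, "revenue": $revenue }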
diff --git a/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/query-issue201.aql b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/query-issue201.aql
new file mode 100644
index 0000000..0505179
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/user-defined-functions/query-issue201.aql
@@ -0,0 +1,12 @@
+/*
+ * Description : This test case is to verify the fix for issue201
+ : https://code.google.com/p/asterixdb/issues/detail?id=201
+ * Expected Res : Success
+ * Date : 26th November 2012
+ */
+
+write output to nc1:"rttest/user-defined-functions_query-issue201.adm";
+
+let $x:=range(1,100)
+for $i in $x
+return $i
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/dml/query-issue205.adm b/asterix-app/src/test/resources/runtimets/results/dml/query-issue205.adm
new file mode 100644
index 0000000..ea80112
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/dml/query-issue205.adm
@@ -0,0 +1 @@
+{ "id": "5678", "stat": { "age": 40, "salary": 100000 }, "deptCode": 16 }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm
new file mode 100644
index 0000000..17d8d1d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_01.adm
@@ -0,0 +1 @@
+{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": null, "Status": "INACTIVE" }, "Timestamp": "Mon Dec 24 13:51:31 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_02.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_02.adm
new file mode 100644
index 0000000..9720960
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_02.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm
new file mode 100644
index 0000000..2fd80d983
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03.adm
@@ -0,0 +1 @@
+{ "DataverseName": "feeds", "DatasetName": "TweetFeed", "DataTypeName": "TweetType", "DatasetType": "FEED", "InternalDetails": null, "ExternalDetails": null, "FeedDetails": { "FileStructure": "BTREE", "PartitioningStrategy": "HASH", "PartitioningKey": [ "id" ], "PrimaryKey": [ "id" ], "GroupName": "DEFAULT_NG_ALL_NODES", "DatasourceAdapter": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Properties": [ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } ], "Function": "feeds.feed_processor@1", "Status": "INACTIVE" }, "Timestamp": "Mon Dec 24 13:49:20 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_04.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_04.adm
new file mode 100644
index 0000000..2567483
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_04.adm
@@ -0,0 +1,11 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/issue_230_feeds.adm b/asterix-app/src/test/resources/runtimets/results/feeds/issue_230_feeds.adm
new file mode 100644
index 0000000..9720960
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/issue_230_feeds.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012" }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012" }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012" }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012" }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012" }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ... #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012" }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012" }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012" }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012" }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012" }
diff --git a/asterix-app/src/test/resources/runtimets/results/fuzzyjoin/dblp-aqlplus_2.adm b/asterix-app/src/test/resources/runtimets/results/fuzzyjoin/dblp-aqlplus_2.adm
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/fuzzyjoin/dblp-aqlplus_2.adm
diff --git a/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_02.adm b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_02.adm
new file mode 100644
index 0000000..d7ae022
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_02.adm
@@ -0,0 +1,5 @@
+{ "word": "am", "count": 1 }
+{ "word": "grover", "count": 1 }
+{ "word": "hi", "count": 1 }
+{ "word": "i", "count": 1 }
+{ "word": "raman", "count": 1 }
diff --git a/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_03.adm b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_03.adm
new file mode 100644
index 0000000..1033913
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hdfs/hdfs_03.adm
@@ -0,0 +1,93 @@
+{ "word": "a", "count": 68 }
+{ "word": "addressing", "count": 34 }
+{ "word": "an", "count": 34 }
+{ "word": "analyzing", "count": 68 }
+{ "word": "and", "count": 238 }
+{ "word": "areas", "count": 34 }
+{ "word": "asterix", "count": 102 }
+{ "word": "by", "count": 34 }
+{ "word": "cases", "count": 68 }
+{ "word": "clusters", "count": 68 }
+{ "word": "combining", "count": 34 }
+{ "word": "commodity", "count": 34 }
+{ "word": "computing", "count": 102 }
+{ "word": "content", "count": 34 }
+{ "word": "create", "count": 34 }
+{ "word": "data", "count": 238 }
+{ "word": "database", "count": 34 }
+{ "word": "databases", "count": 34 }
+{ "word": "datum", "count": 34 }
+{ "word": "declarative", "count": 34 }
+{ "word": "developing", "count": 34 }
+{ "word": "distinct", "count": 34 }
+{ "word": "each", "count": 34 }
+{ "word": "for", "count": 34 }
+{ "word": "formats", "count": 34 }
+{ "word": "from", "count": 68 }
+{ "word": "generation", "count": 34 }
+{ "word": "highly", "count": 68 }
+{ "word": "ideas", "count": 34 }
+{ "word": "including", "count": 34 }
+{ "word": "indexing", "count": 68 }
+{ "word": "information", "count": 136 }
+{ "word": "ingesting", "count": 34 }
+{ "word": "intensive", "count": 68 }
+{ "word": "irregular", "count": 34 }
+{ "word": "is", "count": 204 }
+{ "word": "issues", "count": 34 }
+{ "word": "large", "count": 68 }
+{ "word": "managing", "count": 34 }
+{ "word": "merging", "count": 34 }
+{ "word": "much", "count": 34 }
+{ "word": "new", "count": 34 }
+{ "word": "next", "count": 34 }
+{ "word": "nothing", "count": 34 }
+{ "word": "of", "count": 136 }
+{ "word": "on", "count": 102 }
+{ "word": "open", "count": 68 }
+{ "word": "parallel", "count": 68 }
+{ "word": "performant", "count": 34 }
+{ "word": "platform", "count": 34 }
+{ "word": "problem", "count": 34 }
+{ "word": "processing", "count": 34 }
+{ "word": "project", "count": 68 }
+{ "word": "quantities", "count": 34 }
+{ "word": "query", "count": 34 }
+{ "word": "querying", "count": 34 }
+{ "word": "range", "count": 34 }
+{ "word": "ranging", "count": 34 }
+{ "word": "regular", "count": 34 }
+{ "word": "research", "count": 34 }
+{ "word": "running", "count": 34 }
+{ "word": "scalable", "count": 34 }
+{ "word": "scales", "count": 34 }
+{ "word": "semi", "count": 170 }
+{ "word": "shared", "count": 34 }
+{ "word": "software", "count": 34 }
+{ "word": "solutions", "count": 34 }
+{ "word": "source", "count": 34 }
+{ "word": "stance", "count": 34 }
+{ "word": "storage", "count": 34 }
+{ "word": "storing", "count": 34 }
+{ "word": "structured", "count": 170 }
+{ "word": "subscribing", "count": 34 }
+{ "word": "support", "count": 34 }
+{ "word": "tagged", "count": 34 }
+{ "word": "taking", "count": 34 }
+{ "word": "targets", "count": 34 }
+{ "word": "techniques", "count": 68 }
+{ "word": "technologies", "count": 34 }
+{ "word": "textual", "count": 34 }
+{ "word": "that", "count": 34 }
+{ "word": "the", "count": 102 }
+{ "word": "three", "count": 34 }
+{ "word": "to", "count": 170 }
+{ "word": "todays", "count": 34 }
+{ "word": "use", "count": 68 }
+{ "word": "vast", "count": 34 }
+{ "word": "very", "count": 34 }
+{ "word": "well", "count": 34 }
+{ "word": "where", "count": 68 }
+{ "word": "wide", "count": 34 }
+{ "word": "with", "count": 34 }
+{ "word": "yet", "count": 34 }
diff --git a/asterix-app/src/test/resources/runtimets/results/hdfs/issue_245_hdfs.adm b/asterix-app/src/test/resources/runtimets/results/hdfs/issue_245_hdfs.adm
new file mode 100644
index 0000000..8af2f5f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/hdfs/issue_245_hdfs.adm
@@ -0,0 +1,4 @@
+{ "line": "The ASTERIX project is developing new technologies for ingesting, storing, managing, indexing, querying, analyzing, and subscribing to vast quantities of semi-structured information" }
+{ "line": "The project is combining ideas from three distinct areas semi-structured data, parallel databases, and data-intensive computing to create a next-generation, open source software platform that scales by running on large, shared-nothing commodity computing clusters" }
+{ "line": "ASTERIX targets a wide range of semi-structured information, ranging from data use cases where information is well-tagged and highly regular to content use cases where data is irregular and much of each datum is textual" }
+{ "line": "ASTERIX is taking an open stance on data formats and addressing research issues including highly scalable data storage and indexing, semi-structured query processing on very large clusters, and merging parallel database techniques with todays data-intensive computing techniques to support performant yet declarative solutions to the problem of analyzing semi-structured information" }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue134.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue134.adm
new file mode 100644
index 0000000..0d06066
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue134.adm
@@ -0,0 +1 @@
+{{ [ 1, 2, 3, 4, 5 ], [ 6, 5, 3, 8, 9 ], [ 44, 22, 66, -1, 0, 99.9d ] }}
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue166.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue166.adm
new file mode 100644
index 0000000..b5897af
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue166.adm
@@ -0,0 +1 @@
+[ 4, 5, 6, 7 ]
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue208.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue208.adm
new file mode 100644
index 0000000..14c1c18
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue208.adm
@@ -0,0 +1,6 @@
+{ "count": 1, "user": "RollandEckhard#500" }
+{ "count": 2, "user": "RollandEckhardstein#211" }
+{ "count": 1, "user": "RollandEckhardstein#221" }
+{ "count": 1, "user": "RollandEcstein#211" }
+{ "count": 1, "user": "Rolldstein#211" }
+{ "count": 1, "user": "Rolltein#211" }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue236.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue236.adm
new file mode 100644
index 0000000..65bdb8f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue236.adm
@@ -0,0 +1 @@
+{ "tweetid": "1111387810", "tweetid-copy": "1111387810", "user": { "screen-name": "TonyNapier#786", "lang": "en", "friends_count": 4241366, "statuses_count": 97, "name": "Tony Napier", "followers_count": 5984113 }, "sender-location": point("29.24,78.35"), "send-time": datetime("2011-11-24T14:24:51.000Z"), "send-time-copy": datetime("2011-11-24T14:24:51.000Z"), "referred-topics": {{ "sprint", "wireless" }}, "message-text": " love sprint its wireless is mind-blowing:)" }
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue29.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue29.adm
new file mode 100644
index 0000000..3d2819e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue29.adm
@@ -0,0 +1 @@
+{{ { "tweetid": "1023", "user": { "screen-name": "dflynn24", "lang": "en", "friends_count": 46, "statuses_count": 987, "name": "danielle flynn", "followers_count": 47 }, "sender-location": "40.904177,-72.958996", "send-time": "2010-02-21T11:56:02-05:00", "referred-topics": {{ "verizon" }}, "message-text": "i need a #verizon phone like nowwwww! :(" }, { "tweetid": "1024", "user": { "screen-name": "miriamorous", "lang": "en", "friends_count": 69, "statuses_count": 1068, "name": "Miriam Songco", "followers_count": 78 }, "send-time": "2010-02-21T11:11:43-08:00", "referred-topics": {{ "commercials", "verizon", "att" }}, "message-text": "#verizon & #att #commercials, so competitive" }, { "tweetid": "1025", "user": { "screen-name": "dj33", "lang": "en", "friends_count": 96, "statuses_count": 1696, "name": "Don Jango", "followers_count": 22 }, "send-time": "2010-02-21T12:38:44-05:00", "referred-topics": {{ "charlotte" }}, "message-text": "Chillin at dca waiting for 900am flight to #charlotte and from there to providenciales" }, { "tweetid": "1026", "user": { "screen-name": "reallyleila", "lang": "en", "friends_count": 106, "statuses_count": 107, "name": "Leila Samii", "followers_count": 52 }, "send-time": "2010-02-21T21:31:57-06:00", "referred-topics": {{ "verizon", "at&t", "iphone" }}, "message-text": "I think a switch from #verizon to #at&t may be in my near future... my smartphone is like a land line compared to the #iphone!" } }}
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue55-1.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue55-1.adm
new file mode 100644
index 0000000..adf0f33
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue55-1.adm
@@ -0,0 +1,49 @@
+[ 1.1f, 1.1f, "=", true, "<", false, "<=", true, ">", false, ">=", true ]
+[ 1.1f, 1.0f, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.1f, 1.2f, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1.1f, 0.9d, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.1f, 1.3d, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1.1f, 1, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.1f, 2, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1.0f, 1.1f, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1.0f, 1.0f, "=", true, "<", false, "<=", true, ">", false, ">=", true ]
+[ 1.0f, 1.2f, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1.0f, 0.9d, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.0f, 1.3d, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1.0f, 1, "=", true, "<", false, "<=", true, ">", false, ">=", true ]
+[ 1.0f, 2, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1.2f, 1.1f, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.2f, 1.0f, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.2f, 1.2f, "=", true, "<", false, "<=", true, ">", false, ">=", true ]
+[ 1.2f, 0.9d, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.2f, 1.3d, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1.2f, 1, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.2f, 2, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 0.9d, 1.1f, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 0.9d, 1.0f, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 0.9d, 1.2f, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 0.9d, 0.9d, "=", true, "<", false, "<=", true, ">", false, ">=", true ]
+[ 0.9d, 1.3d, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 0.9d, 1, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 0.9d, 2, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1.3d, 1.1f, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.3d, 1.0f, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.3d, 1.2f, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.3d, 0.9d, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.3d, 1.3d, "=", true, "<", false, "<=", true, ">", false, ">=", true ]
+[ 1.3d, 1, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1.3d, 2, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1, 1.1f, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1, 1.0f, "=", true, "<", false, "<=", true, ">", false, ">=", true ]
+[ 1, 1.2f, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1, 0.9d, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 1, 1.3d, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 1, 1, "=", true, "<", false, "<=", true, ">", false, ">=", true ]
+[ 1, 2, "=", false, "<", true, "<=", true, ">", false, ">=", false ]
+[ 2, 1.1f, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 2, 1.0f, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 2, 1.2f, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 2, 0.9d, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 2, 1.3d, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 2, 1, "=", false, "<", false, "<=", false, ">", true, ">=", true ]
+[ 2, 2, "=", true, "<", false, "<=", true, ">", false, ">=", true ]
diff --git a/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue55.adm b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue55.adm
new file mode 100644
index 0000000..d384bce
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/open-closed/query-issue55.adm
@@ -0,0 +1,4 @@
+[ 1, 3 ]
+[ 4, 5, 2 ]
+[ -1, -3, 0 ]
+[ "a" ]
diff --git a/asterix-app/src/test/resources/runtimets/results/records/field-access-on-open-field.adm b/asterix-app/src/test/resources/runtimets/results/records/field-access-on-open-field.adm
new file mode 100644
index 0000000..d4edf93
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/records/field-access-on-open-field.adm
@@ -0,0 +1 @@
+92617
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_1.adm b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_1.adm
new file mode 100644
index 0000000..a7ec8f6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_1.adm
@@ -0,0 +1,100 @@
+{ "id": 1, "dblpid": "books/acm/kim95/AnnevelinkACFHK95", "title": "Object SQL - A Language for the Design and Implementation of Object Databases.", "authors": "Jurgen Annevelink Rafiul Ahad Amelia Carlson Daniel H. Fishman Michael L. Heytens William Kent", "misc": "2002-01-03 42-68 1995 Modern Database Systems db/books/collections/kim95.html#AnnevelinkACFHK95" }
+{ "id": 2, "dblpid": "books/acm/kim95/Blakeley95", "title": "OQL[C++] Extending C++ with an Object Query Capability.", "authors": "José A. Blakeley", "misc": "2002-01-03 69-88 Modern Database Systems db/books/collections/kim95.html#Blakeley95 1995" }
+{ "id": 3, "dblpid": "books/acm/kim95/BreitbartGS95", "title": "Transaction Management in Multidatabase Systems.", "authors": "Yuri Breitbart Hector Garcia-Molina Abraham Silberschatz", "misc": "2004-03-08 573-591 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartGS95 1995" }
+{ "id": 4, "dblpid": "books/acm/kim95/ChristodoulakisK95", "title": "Multimedia Information Systems Issues and Approaches.", "authors": "Stavros Christodoulakis Leonidas Koveos", "misc": "2002-01-03 318-337 1995 Modern Database Systems db/books/collections/kim95.html#ChristodoulakisK95" }
+{ "id": 5, "dblpid": "books/acm/kim95/DayalHW95", "title": "Active Database Systems.", "authors": "Umeshwar Dayal Eric N. Hanson Jennifer Widom", "misc": "2002-01-03 434-456 1995 Modern Database Systems db/books/collections/kim95.html#DayalHW95" }
+{ "id": 6, "dblpid": "books/acm/kim95/DittrichD95", "title": "Where Object-Oriented DBMSs Should Do Better A Critique Based on Early Experiences.", "authors": "Angelika Kotz Dittrich Klaus R. Dittrich", "misc": "2002-01-03 238-254 1995 Modern Database Systems db/books/collections/kim95.html#DittrichD95" }
+{ "id": 7, "dblpid": "books/acm/kim95/Garcia-MolinaH95", "title": "Distributed Databases.", "authors": "Hector Garcia-Molina Meichun Hsu", "misc": "2002-01-03 477-493 1995 Modern Database Systems db/books/collections/kim95.html#Garcia-MolinaH95" }
+{ "id": 8, "dblpid": "books/acm/kim95/Goodman95", "title": "An Object-Oriented DBMS War Story Developing a Genome Mapping Database in C++.", "authors": "Nathan Goodman", "misc": "2002-01-03 216-237 1995 Modern Database Systems db/books/collections/kim95.html#Goodman95" }
+{ "id": 9, "dblpid": "books/acm/kim95/Kaiser95", "title": "Cooperative Transactions for Multiuser Environments.", "authors": "Gail E. Kaiser", "misc": "2002-01-03 409-433 1995 Modern Database Systems db/books/collections/kim95.html#Kaiser95" }
+{ "id": 10, "dblpid": "books/acm/kim95/KelleyGKRG95", "title": "Schema Architecture of the UniSQL/M Multidatabase System", "authors": "William Kelley Sunit K. Gala Won Kim Tom C. Reyes Bruce Graham", "misc": "2004-03-08 Modern Database Systems books/acm/Kim95 621-648 1995 db/books/collections/kim95.html#KelleyGKRG95" }
+{ "id": 11, "dblpid": "books/acm/kim95/KemperM95", "title": "Physical Object Management.", "authors": "Alfons Kemper Guido Moerkotte", "misc": "2002-01-03 175-202 1995 Modern Database Systems db/books/collections/kim95.html#KemperM95" }
+{ "id": 12, "dblpid": "books/acm/kim95/Kim95", "title": "Introduction to Part 1 Next-Generation Database Technology.", "authors": "Won Kim", "misc": "2002-01-03 5-17 1995 Modern Database Systems db/books/collections/kim95.html#Kim95" }
+{ "id": 13, "dblpid": "books/acm/kim95/Kim95a", "title": "Object-Oriented Database Systems Promises, Reality, and Future.", "authors": "Won Kim", "misc": "2002-01-03 255-280 1995 Modern Database Systems db/books/collections/kim95.html#Kim95a" }
+{ "id": 14, "dblpid": "books/acm/kim95/Kim95b", "title": "Introduction to Part 2 Technology for Interoperating Legacy Databases.", "authors": "Won Kim", "misc": "2002-01-03 515-520 1995 Modern Database Systems db/books/collections/kim95.html#Kim95b" }
+{ "id": 15, "dblpid": "books/acm/kim95/KimCGS95", "title": "On Resolving Schematic Heterogeneity in Multidatabase Systems.", "authors": "Won Kim Injun Choi Sunit K. Gala Mark Scheevel", "misc": "2002-01-03 521-550 1995 Modern Database Systems db/books/collections/kim95.html#KimCGS95" }
+{ "id": 16, "dblpid": "books/acm/kim95/KimG95", "title": "Requirements for a Performance Benchmark for Object-Oriented Database Systems.", "authors": "Won Kim Jorge F. Garza", "misc": "2002-01-03 203-215 1995 Modern Database Systems db/books/collections/kim95.html#KimG95" }
+{ "id": 17, "dblpid": "books/acm/kim95/KimK95", "title": "On View Support in Object-Oriented Databases Systems.", "authors": "Won Kim William Kelley", "misc": "2002-01-03 108-129 1995 Modern Database Systems db/books/collections/kim95.html#KimK95" }
+{ "id": 18, "dblpid": "books/acm/kim95/Kowalski95", "title": "The POSC Solution to Managing E&P Data.", "authors": "Vincent J. Kowalski", "misc": "2002-01-03 281-301 1995 Modern Database Systems db/books/collections/kim95.html#Kowalski95" }
+{ "id": 19, "dblpid": "books/acm/kim95/KriegerA95", "title": "C++ Bindings to an Object Database.", "authors": "David Krieger Tim Andrews", "misc": "2002-01-03 89-107 1995 Modern Database Systems db/books/collections/kim95.html#KriegerA95" }
+{ "id": 20, "dblpid": "books/acm/kim95/Lunt95", "title": "Authorization in Object-Oriented Databases.", "authors": "Teresa F. Lunt", "misc": "2002-01-03 130-145 1995 Modern Database Systems db/books/collections/kim95.html#Lunt95" }
+{ "id": 21, "dblpid": "books/acm/kim95/MengY95", "title": "Query Processing in Multidatabase Systems.", "authors": "Weiyi Meng Clement T. Yu", "misc": "2002-01-03 551-572 1995 Modern Database Systems db/books/collections/kim95.html#MengY95" }
+{ "id": 22, "dblpid": "books/acm/kim95/Motro95", "title": "Management of Uncerainty in database Systems.", "authors": "Amihai Motro", "misc": "2002-01-03 457-476 1995 Modern Database Systems db/books/collections/kim95.html#Motro95" }
+{ "id": 23, "dblpid": "books/acm/kim95/Omiecinski95", "title": "Parallel Relational Database Systems.", "authors": "Edward Omiecinski", "misc": "2002-01-03 494-512 1995 Modern Database Systems db/books/collections/kim95.html#Omiecinski95" }
+{ "id": 24, "dblpid": "books/acm/kim95/OzsuB95", "title": "Query Processing in Object-Oriented Database Systems.", "authors": "M. Tamer Özsu José A. Blakeley", "misc": "2002-01-03 146-174 1995 Modern Database Systems db/books/collections/kim95.html#OzsuB95" }
+{ "id": 25, "dblpid": "books/acm/kim95/RusinkiewiczS95", "title": "Specification and Execution of Transactional Workflows.", "authors": "Marek Rusinkiewicz Amit P. Sheth", "misc": "2004-03-08 592-620 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#RusinkiewiczS95 1995" }
+{ "id": 26, "dblpid": "books/acm/kim95/Samet95", "title": "Spatial Data Structures.", "authors": "Hanan Samet", "misc": "2004-03-08 361-385 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Samet95 1995" }
+{ "id": 27, "dblpid": "books/acm/kim95/SametA95", "title": "Spatial Data Models and Query Processing.", "authors": "Hanan Samet Walid G. Aref", "misc": "2002-01-03 338-360 1995 Modern Database Systems db/books/collections/kim95.html#SametA95" }
+{ "id": 28, "dblpid": "books/acm/kim95/ShanADDK95", "title": "Pegasus A Heterogeneous Information Management System.", "authors": "Ming-Chien Shan Rafi Ahmed Jim Davis Weimin Du William Kent", "misc": "2004-03-08 664-682 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#ShanADDK95 1995" }
+{ "id": 29, "dblpid": "books/acm/kim95/Snodgrass95", "title": "Temporal Object-Oriented Databases A Critical Comparison.", "authors": "Richard T. Snodgrass", "misc": "2002-01-03 386-408 1995 Modern Database Systems db/books/collections/kim95.html#Snodgrass95" }
+{ "id": 30, "dblpid": "books/acm/kim95/SoleyK95", "title": "The OMG Object Model.", "authors": "Richard Mark Soley William Kent", "misc": "2002-01-03 18-41 1995 Modern Database Systems db/books/collections/kim95.html#SoleyK95" }
+{ "id": 31, "dblpid": "books/acm/kim95/Stout95", "title": "EDA/SQL.", "authors": "Ralph L. Stout", "misc": "2004-03-08 649-663 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Stout95 1995" }
+{ "id": 32, "dblpid": "books/acm/kim95/Thompson95", "title": "The Changing Database Standards Landscape.", "authors": "Craig W. Thompson", "misc": "2002-01-03 302-317 1995 Modern Database Systems db/books/collections/kim95.html#Thompson95" }
+{ "id": 33, "dblpid": "books/acm/kim95/BreitbartR95", "title": "Overview of the ADDS System.", "authors": "Yuri Breitbart Tom C. Reyes", "misc": "2009-06-12 683-701 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartR95 1995" }
+{ "id": 34, "dblpid": "books/acm/Kim95", "title": "Modern Database Systems The Object Model, Interoperability, and Beyond.", "authors": "", "misc": "2004-03-08 Won Kim Modern Database Systems ACM Press and Addison-Wesley 1995 0-201-59098-0 db/books/collections/kim95.html" }
+{ "id": 35, "dblpid": "books/ap/MarshallO79", "title": "Inequalities Theory of Majorization and Its Application.", "authors": "Albert W. Marshall Ingram Olkin", "misc": "2002-01-03 Academic Press 1979 0-12-473750-1" }
+{ "id": 36, "dblpid": "books/aw/kimL89/BjornerstedtH89", "title": "Version Control in an Object-Oriented Architecture.", "authors": "Anders Björnerstedt Christer Hulten", "misc": "2006-02-24 451-485 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BjornerstedtH89" }
+{ "id": 37, "dblpid": "books/aw/kimL89/BretlMOPSSWW89", "title": "The GemStone Data Management System.", "authors": "Robert Bretl David Maier Allen Otis D. Jason Penney Bruce Schuchardt Jacob Stein E. Harold Williams Monty Williams", "misc": "2002-01-03 283-308 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BretlMOPSSWW89" }
+{ "id": 38, "dblpid": "books/aw/kimL89/CareyDRS89", "title": "Storage Management in EXODUS.", "authors": "Michael J. Carey David J. DeWitt Joel E. Richardson Eugene J. Shekita", "misc": "2002-01-03 341-369 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#CareyDRS89" }
+{ "id": 39, "dblpid": "books/aw/kimL89/Decouchant89", "title": "A Distributed Object Manager for the Smalltalk-80 System.", "authors": "Dominique Decouchant", "misc": "2002-01-03 487-520 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Decouchant89" }
+{ "id": 40, "dblpid": "books/aw/kimL89/DiederichM89", "title": "Objects, Messages, and Rules in Database Design.", "authors": "Jim Diederich Jack Milton", "misc": "2002-01-03 177-197 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#DiederichM89" }
+{ "id": 41, "dblpid": "books/aw/kimL89/EllisG89", "title": "Active Objects Ealities and Possibilities.", "authors": "Clarence A. Ellis Simon J. Gibbs", "misc": "2002-01-03 561-572 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#EllisG89" }
+{ "id": 42, "dblpid": "books/aw/kimL89/FishmanABCCDHHKLLMNRSW89", "title": "Overview of the Iris DBMS.", "authors": "Daniel H. Fishman Jurgen Annevelink David Beech E. C. Chow Tim Connors J. W. Davis Waqar Hasan C. G. Hoch William Kent S. Leichner Peter Lyngbæk Brom Mahbod Marie-Anne Neimat Tore Risch Ming-Chien Shan W. Kevin Wilkinson", "misc": "2002-01-03 219-250 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#FishmanABCCDHHKLLMNRSW89" }
+{ "id": 43, "dblpid": "books/aw/kimL89/KimBCGW89", "title": "Features of the ORION Object-Oriented Database System.", "authors": "Won Kim Nat Ballou Hong-Tai Chou Jorge F. Garza Darrell Woelk", "misc": "2002-01-03 251-282 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimBCGW89" }
+{ "id": 44, "dblpid": "books/aw/kimL89/KimKD89", "title": "Indexing Techniques for Object-Oriented Databases.", "authors": "Won Kim Kyung-Chang Kim Alfred G. Dale", "misc": "2002-01-03 371-394 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimKD89" }
+{ "id": 45, "dblpid": "books/aw/kimL89/King89", "title": "My Cat Is Object-Oriented.", "authors": "Roger King", "misc": "2002-01-03 23-30 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#King89" }
+{ "id": 46, "dblpid": "books/aw/kimL89/Maier89", "title": "Making Database Systems Fast Enough for CAD Applications.", "authors": "David Maier", "misc": "2002-01-03 573-582 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Maier89" }
+{ "id": 47, "dblpid": "books/aw/kimL89/MellenderRS89", "title": "Optimizing Smalltalk Message Performance.", "authors": "Fred Mellender Steve Riegel Andrew Straw", "misc": "2002-01-03 423-450 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#MellenderRS89" }
+{ "id": 48, "dblpid": "books/aw/kimL89/Moon89", "title": "The Common List Object-Oriented Programming Language Standard.", "authors": "David A. Moon", "misc": "2002-01-03 49-78 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moon89" }
+{ "id": 49, "dblpid": "books/aw/kimL89/Moss89", "title": "Object Orientation as Catalyst for Language-Database Inegration.", "authors": "J. Eliot B. Moss", "misc": "2002-01-03 583-592 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moss89" }
+{ "id": 50, "dblpid": "books/aw/kimL89/Nierstrasz89", "title": "A Survey of Object-Oriented Concepts.", "authors": "Oscar Nierstrasz", "misc": "2002-01-03 3-21 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Nierstrasz89" }
+{ "id": 51, "dblpid": "books/aw/kimL89/NierstraszT89", "title": "Integrated Office Systems.", "authors": "Oscar Nierstrasz Dennis Tsichritzis", "misc": "2002-01-03 199-215 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#NierstraszT89" }
+{ "id": 52, "dblpid": "books/aw/kimL89/Russinoff89", "title": "Proteus A Frame-Based Nonmonotonic Inference System.", "authors": "David M. Russinoff", "misc": "2002-01-03 127-150 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#Russinoff89" }
+{ "id": 53, "dblpid": "books/aw/kimL89/SkarraZ89", "title": "Concurrency Control and Object-Oriented Databases.", "authors": "Andrea H. Skarra Stanley B. Zdonik", "misc": "2002-01-03 395-421 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SkarraZ89" }
+{ "id": 54, "dblpid": "books/aw/kimL89/SteinLU89", "title": "A Shared View of Sharing The Treaty of Orlando.", "authors": "Lynn Andrea Stein Henry Lieberman David Ungar", "misc": "2002-01-03 31-48 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SteinLU89" }
+{ "id": 55, "dblpid": "books/aw/kimL89/TarltonT89", "title": "Pogo A Declarative Representation System for Graphics.", "authors": "Mark A. Tarlton P. Nong Tarlton", "misc": "2002-01-03 151-176 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TarltonT89" }
+{ "id": 56, "dblpid": "books/aw/kimL89/TomlinsonS89", "title": "Concurrent Object-Oriented Programming Languages.", "authors": "Chris Tomlinson Mark Scheevel", "misc": "2002-01-03 79-124 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TomlinsonS89" }
+{ "id": 57, "dblpid": "books/aw/kimL89/TsichritzisN89", "title": "Directions in Object-Oriented Research.", "authors": "Dennis Tsichritzis Oscar Nierstrasz", "misc": "2002-01-03 523-536 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TsichritzisN89" }
+{ "id": 58, "dblpid": "books/aw/kimL89/Wand89", "title": "A Proposal for a Formal Model of Objects.", "authors": "Yair Wand", "misc": "2002-01-03 537-559 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Wand89" }
+{ "id": 59, "dblpid": "books/aw/kimL89/WeiserL89", "title": "OZ+ An Object-Oriented Database System.", "authors": "Stephen P. Weiser Frederick H. Lochovsky", "misc": "2002-01-03 309-337 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#WeiserL89" }
+{ "id": 60, "dblpid": "books/aw/stonebraker86/RoweS86", "title": "The Commercial INGRES Epilogue.", "authors": "Lawrence A. Rowe Michael Stonebraker", "misc": "2002-01-03 63-82 1986 The INGRES Papers db/books/collections/Stonebraker86.html#RoweS86 db/books/collections/Stonebraker86/RoweS86.html ingres/P063.pdf" }
+{ "id": 61, "dblpid": "books/aw/stonebraker86/Stonebraker86", "title": "Design of Relational Systems (Introduction to Section 1).", "authors": "Michael Stonebraker", "misc": "2002-01-03 1-3 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86 db/books/collections/Stonebraker86/Stonebraker86.html ingres/P001.pdf" }
+{ "id": 62, "dblpid": "books/aw/stonebraker86/Stonebraker86a", "title": "Supporting Studies on Relational Systems (Introduction to Section 2).", "authors": "Michael Stonebraker", "misc": "2002-01-03 83-85 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86a db/books/collections/Stonebraker86/Stonebraker86a.html ingres/P083.pdf" }
+{ "id": 63, "dblpid": "books/aw/stonebraker86/Stonebraker86b", "title": "Distributed Database Systems (Introduction to Section 3).", "authors": "Michael Stonebraker", "misc": "2002-01-03 183-186 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86b db/books/collections/Stonebraker86/Stonebraker86b.html ingres/P183.pdf" }
+{ "id": 64, "dblpid": "books/aw/stonebraker86/Stonebraker86c", "title": "The Design and Implementation of Distributed INGRES.", "authors": "Michael Stonebraker", "misc": "2002-01-03 187-196 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86c db/books/collections/Stonebraker86/Stonebraker86c.html ingres/P187.pdf" }
+{ "id": 65, "dblpid": "books/aw/stonebraker86/Stonebraker86d", "title": "User Interfaces for Database Systems (Introduction to Section 4).", "authors": "Michael Stonebraker", "misc": "2002-01-03 243-245 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86d db/books/collections/Stonebraker86/Stonebraker86d.html ingres/P243.pdf" }
+{ "id": 66, "dblpid": "books/aw/stonebraker86/Stonebraker86e", "title": "Extended Semantics for the Relational Model (Introduction to Section 5).", "authors": "Michael Stonebraker", "misc": "2002-01-03 313-316 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86e db/books/collections/Stonebraker86/Stonebraker86e.html ingres/P313.pdf" }
+{ "id": 67, "dblpid": "books/aw/stonebraker86/Stonebraker86f", "title": "Database Design (Introduction to Section 6).", "authors": "Michael Stonebraker", "misc": "2002-01-03 393-394 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86f db/books/collections/Stonebraker86/Stonebraker86f.html ingres/P393.pdf" }
+{ "id": 68, "dblpid": "books/aw/stonebraker86/X86", "title": "Title, Preface, Contents.", "authors": "", "misc": "2002-01-03 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86 db/books/collections/Stonebraker86/X86.html ingres/frontmatter.pdf" }
+{ "id": 69, "dblpid": "books/aw/stonebraker86/X86a", "title": "References.", "authors": "", "misc": "2002-01-03 429-444 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86a db/books/collections/Stonebraker86/X86a.html ingres/P429.pdf" }
+{ "id": 70, "dblpid": "books/aw/Knuth86a", "title": "TeX The Program", "authors": "Donald E. Knuth", "misc": "2002-01-03 Addison-Wesley 1986 0-201-13437-3" }
+{ "id": 71, "dblpid": "books/aw/AbiteboulHV95", "title": "Foundations of Databases.", "authors": "Serge Abiteboul Richard Hull Victor Vianu", "misc": "2002-01-03 Addison-Wesley 1995 0-201-53771-0 AHV/Toc.pdf ... ... journals/tods/AstrahanBCEGGKLMMPTWW76 books/bc/AtzeniA93 journals/tcs/AtzeniABM82 journals/jcss/AbiteboulB86 journals/csur/AtkinsonB87 conf/pods/AtzeniB87 journals/vldb/AbiteboulB95 conf/sigmod/AbiteboulB91 conf/dood/AtkinsonBDDMZ89 conf/vldb/AlbanoBGO93 ... conf/icdt/Abiteboul88 journals/ipl/Abiteboul89 conf/ds/Abrial74 journals/tods/AhoBU79 books/mk/minker88/AptBW88 conf/vldb/AroraC78 conf/stoc/AfratiC89 journals/tods/AlbanoCO85 conf/pods/AfratiCY91 conf/pods/AusielloDM85 conf/vldb/AbiteboulG85 journals/jacm/AjtaiG87 conf/focs/AjtaiG89 journals/tods/AbiteboulG91 ... ... journals/tods/AbiteboulH87 conf/sigmod/AbiteboulH88 ... conf/sigmod/AbiteboulK89 journals/tcs/AbiteboulKG91 journals/jcss/AbiteboulKRW95 conf/sigmod/AbiteboulLUW93 conf/pods/AtzeniP82 conf/pods/AfratiP87 conf/pods/AptP87 conf/wg/AndriesP91 conf/pods/AfratiPPRSU86 books/el/leeuwen90/Apt90 conf/ifip/Armstrong74 journals/siamcomp/AhoSSU81 journals/tods/AhoSU79 journals/siamcomp/AhoSU79 conf/pods/AbiteboulSV90 journals/is/AtzeniT93 conf/popl/AhoU79 conf/pods/AbiteboulV87 conf/jcdkb/AbiteboulV88 journals/jacm/AbiteboulV88 conf/pods/AbiteboulV88 journals/jacm/AbiteboulV89 journals/jcss/AbiteboulV90 journals/jcss/AbiteboulV91 conf/stoc/AbiteboulV91 journals/amai/AbiteboulV91 journals/jcss/AbiteboulV95 journals/jacm/AptE82 conf/coco/AbiteboulVV92 conf/iclp/AptB88 conf/oopsla/BobrowKKMSZ86 journals/tse/BatoryBGSTTW88 conf/mfcs/Bancilhon78 ... conf/db-workshops/Bancilhon85 books/el/leeuwen90/Barendregt90 ... journals/tods/BeeriB79 books/el/leeuwen90/BerstelB90 conf/icdt/BeneventanoB92 conf/vldb/BernsteinBC80 conf/vldb/BeeriBG78 conf/sigmod/BorgidaBMR89 journals/tods/BunemanC79 journals/jacm/BernsteinC81 conf/dbpl/BancilhonCD89 books/bc/tanselCGSS93/BaudinetCW93 conf/sigmod/BiskupDB79 journals/jacm/BeeriDFS84 books/mk/BancilhonDK92 conf/edbt/BryDM88 conf/pods/BunemanDW88 journals/jcss/BunemanDW91 journals/tods/Beeri80 journals/dke/Beeri90 ... journals/tods/Bernstein76 conf/lics/BidoitF87 journals/iandc/BidoitF91 conf/sigmod/BeeriFH77 conf/stoc/BeeriFMMUY81 journals/jacm/BeeriFMY83 journals/tods/BunemanFN82 journals/siamcomp/BernsteinG81 journals/iandc/BlassGK85 conf/ijcai/BrachmanGL85 journals/tods/BernsteinGWRR81 books/aw/BernsteinHG87 ... journals/tcs/Bidoit91 journals/tcs/Biskup80 conf/adbt/Biskup79 journals/tods/Biskup83 journals/tcs/BunemanJO91 journals/tods/BeeriK86 conf/pods/BeeriKBR87 conf/icdt/BidoitL90 journals/csur/BatiniL86 conf/sigmod/BlakeleyLT86 conf/vldb/BeeriM91 conf/sigmod/BlakeleyMG93 journals/siamcomp/BeeriMSU81 conf/pods/BancilhonMSU86 conf/pods/BeeriNRST87 journals/software/Borgida85 conf/icalp/BraP83 conf/fgcs/BalbinMR88 ... conf/pods/BeeriR87 journals/jlp/BalbinR87 conf/sigmod/BancilhonR86 books/mk/minker88/BancilhonR88 journals/jlp/BeeriR91 conf/vldb/BancilhonRS82 conf/pods/BeeriRSS92 conf/dood/Bry89 journals/tods/BancilhonS81 journals/cogsci/BrachmanS85 journals/tods/BergamaschiS92 conf/sigmod/BernsteinST75 conf/dbpl/TannenBN91 conf/icdt/TannenBW92 ... journals/jacm/BeeriV84 conf/icalp/BeeriV81 conf/adbt/BeeriV79 journals/siamcomp/BeeriV84 journals/iandc/BeeriV84 journals/jacm/BeeriV84 journals/tcs/BeeriV85 journals/ibmrd/ChamberlinAEGLMRW76 ... 
journals/iandc/Cardelli88 books/mk/Cattell94 conf/sigmod/CacaceCCTZ90 conf/vldb/CastilhoCF82 conf/adbt/CasanovaF82 conf/focs/CaiFI89 journals/jcss/CasanovaFP84 conf/stoc/CosmadakisGKV88 conf/dood/CorciuloGP93 books/sp/CeriGT90 conf/focs/ChandraH80 journals/jcss/ChandraH80 journals/jcss/ChandraH82 journals/jlp/ChandraH85 conf/popl/Chandra81 conf/adbt/Chang79 conf/pods/Chandra88 ... journals/tods/Chen76 conf/ride/ChenHM94 conf/icde/Chomicki92 conf/pods/Chomicki92 ... ... ... conf/stoc/CosmadakisK85 journals/acr/CosmadakisK86 ... journals/jcss/CosmadakisKS86 journals/jacm/CosmadakisKV90 ... conf/pods/CalvaneseL94 conf/adbt/Clark77 conf/stoc/ChandraLM81 conf/stoc/ChandraM77 conf/pods/ConsensM90 conf/sigmod/ConsensM93 conf/icdt/ConsensM90 journals/cacm/Codd70 conf/sigmod/Codd71a persons/Codd71a persons/Codd72 conf/ifip/Codd74 ... conf/sigmod/Codd79 journals/cacm/Codd82 ... conf/sigmod/Cohen89 journals/cacm/Cohen90 ... journals/jcss/Cook74 conf/pods/Cosmadakis83 conf/focs/Cosmadakis87 books/el/leeuwen90/Courcelle90a journals/jacm/CosmadakisP84 conf/edbt/CeriCGLLTZ88 ... conf/vldb/CeriT87 conf/vldb/CasanovaTF88 ... conf/pods/CasanovaV83 journals/siamcomp/ChandraV85 conf/pods/ChaudhuriV92 conf/pods/ChaudhuriV93 conf/pods/ChaudhuriV94 journals/csur/CardelliW85 conf/pods/ChenW89 conf/pods/CohenW89 conf/vldb/CeriW90 conf/vldb/CeriW91 conf/iclp/ChenW92 conf/vldb/CeriW93 ... conf/birthday/Dahlhaus87 conf/vldb/Date81 books/aw/Date86 ... conf/dbpl/Dayal89 journals/tods/DayalB82 journals/ibmrd/DelobelC73 conf/icde/DelcambreD89 ... journals/tods/Delobel78 journals/jacm/Demolombe92 journals/tods/DateF92 ... conf/vldb/DayalHL91 journals/jacm/Paola69a conf/caap/DahlhausM86 journals/acr/DAtriM86 journals/iandc/DahlhausM92 conf/sigmod/DerrMP93 conf/vldb/MaindrevilleS88 conf/pods/Dong92 conf/adbt/BraP82 ... conf/dbpl/DongS91 journals/iandc/DongS95 conf/dbpl/DongS93 conf/dbpl/DongS93 conf/icdt/DongT92 conf/vldb/DenninghoffV91 conf/pods/DenninghoffV93 ... ... books/acm/kim95/DayalHW95 ... conf/pods/EiterGM94 conf/pods/Escobar-MolanoHJ93 ... books/el/leeuwen90/Emerson90 books/bc/ElmasriN89 ... conf/icse/Eswaran76 conf/sigmod/EpsteinSW78 ... ... conf/vldb/Fagin77 journals/tods/Fagin77 conf/sigmod/Fagin79 journals/tods/Fagin81 journals/ipl/FaginV83 journals/jacm/Fagin82 journals/jacm/Fagin83 journals/tcs/Fagin93 books/sp/kimrb85/FurtadoC85 ... journals/jlp/Fitting85a journals/tcs/FischerJT83 journals/acr/FaginKUV86 conf/icdt/FernandezM92 journals/tods/FaginMU82 conf/vldb/FaloutsosNS91 ... journals/ai/Forgy82 ... conf/sigmod/Freytag87 ... journals/siamcomp/FischerT83 journals/siamcomp/FaginMUY83 conf/pods/FaginUV83 conf/icalp/FaginV84 ... ... ... ... conf/sigmod/GraefeD87 conf/ride/GatziuD94 conf/sigmod/GardarinM86 conf/sigmod/GyssensG88 journals/tcs/GinsburgH83a journals/jacm/GinsburgH86 ... books/bc/tanselCGSS93/Ginsburg93 books/fm/GareyJ79 journals/jacm/GrantJ82 conf/vldb/GehaniJ91 conf/vldb/GhandeharizadehHJCELLTZ93 journals/tods/GhandeharizadehHJ96 conf/vldb/GehaniJS92 ... conf/sigmod/GehaniJS92 ... conf/deductive/GuptaKM92 conf/pods/GurevichL82 conf/iclp/GelfondL88 conf/adbt/77 journals/csur/GallaireMN84 conf/pods/GrahneMR92 conf/sigmod/GuptaMS93 conf/lics/GaifmanMSV87 journals/jacm/GaifmanMSV93 journals/jacm/GrahamMV86 conf/csl/GradelO92 ... conf/pods/Gottlob87 conf/pods/GyssensPG90 conf/dood/GiannottiPSZ91 books/aw/GoldbergR83 journals/acr/GrahneR86 journals/ipl/Grant77 ... journals/iandc/Grandjean83 conf/vldb/Grahne84 ... 
journals/csur/Graefe93 books/sp/Greibach75 journals/tods/GoodmanS82 journals/jcss/GoodmanS84 conf/focs/GurevichS85 ... conf/pods/GrumbachS94 conf/sigmod/GangulyST90 ... journals/tcs/Gunter92 ... ... ... ... conf/pods/GrahamV84 conf/pods/GrumbachV91 conf/icde/GardarinV92 conf/sigmod/GraefeW89 ... journals/jacm/GinsburgZ82 conf/vldb/GottlobZ88 ... ... journals/sigmod/Hanson89 ... journals/cacm/Harel80 journals/tkde/HaasCLMWLLPCS90 conf/lics/Hella92 journals/iandc/Herrmann95 conf/pods/HirstH93 conf/vldb/HullJ91 conf/ewdw/HullJ90 journals/csur/HullK87 journals/tods/HudsonK89 conf/lics/HillebrandKM93 conf/nato/HillebrandKR93 conf/jcdkb/HsuLM88 journals/ipl/HoneymanLY80 journals/tods/HammerM81 conf/adbt/HenschenMN82 ... journals/jacm/HenschenN84 journals/jacm/Honeyman82 conf/sigmod/HullS89 conf/pods/HullS89 journals/acta/HullS94 journals/jcss/HullS93 conf/fodo/HullTY89 journals/jcss/Hull83 journals/jacm/Hull84 journals/tcs/Hull85 journals/siamcomp/Hull86 ... conf/vldb/Hulin89 ... journals/jacm/HullY84 conf/vldb/HullY90 conf/pods/HullY91 conf/sigmod/IoannidisK90 journals/jcss/ImielinskiL84 conf/adbt/Imielinski82 journals/jcss/Immerman82 journals/iandc/Immerman86 ... journals/siamcomp/Immerman87 conf/pods/ImielinskiN88 conf/vldb/IoannidisNSS92 conf/sigmod/ImielinskiNV91 conf/dood/ImielinskiNV91 conf/vldb/Ioannidis85 journals/jacm/Jacobs82 conf/dbpl/JacobsH91 journals/csur/JarkeK84 journals/jcss/JohnsonK84 conf/popl/JaffarL87 books/el/leeuwen90/Johnson90 journals/jacm/Joyner76 conf/pods/JaeschkeS82 ... books/mk/minker88/Kanellakis88 books/el/leeuwen90/Kanellakis90 conf/oopsla/KhoshafianC86 conf/edbt/KotzDM88 conf/jcdkb/Keller82 conf/pods/Keller85 journals/computer/Keller86 ... journals/tods/Kent79 ... journals/ngc/RohmerLK86 conf/tacs/KanellakisG94 conf/jcdkb/Kifer88 conf/pods/KanellakisKR90 conf/sigmod/KiferKS92 ... conf/icdt/KiferL86 books/aw/KimL89 ... journals/tods/Klug80 journals/jacm/Klug82 journals/jacm/Klug88 journals/jacm/KiferLW95 conf/kr/KatsunoM91 journals/ai/KatsunoM92 conf/jcdkb/KrishnamurthyN88 journals/csur/Knight89 ... journals/iandc/Kolaitis91 journals/ai/Konolige88 conf/ifip/Kowalski74 journals/jacm/Kowalski75 conf/bncod/Kowalski84 conf/vldb/KoenigP81 journals/tods/KlugP82 ... conf/pods/KolaitisP88 conf/pods/KiferRS88 conf/sigmod/KrishnamurthyRS88 books/mg/SilberschatzK91 conf/iclp/KempT88 conf/sigmod/KellerU84 conf/dood/Kuchenhoff91 ... journals/jlp/Kunen87 conf/iclp/Kunen88 conf/pods/Kuper87 conf/pods/Kuper88 conf/ppcp/Kuper93 conf/pods/KuperV84 conf/stoc/KolaitisV87 journals/tcs/KarabegV90 journals/iandc/KolaitisV90 conf/pods/KolaitisV90 journals/tods/KarabegV91 journals/iandc/KolaitisV92 journals/tcs/KuperV93 journals/tods/KuperV93 journals/tse/KellerW85 conf/pods/KiferW89 conf/jcdkb/Lang88 books/el/Leeuwen90 ... journals/jcss/Leivant89 ... journals/iandc/Leivant90 ... conf/db-workshops/Levesque82 journals/ai/Levesque84 conf/mfdbs/Libkin91 conf/er/Lien79 journals/jacm/Lien82 books/mk/minker88/Lifschitz88 ... journals/tcs/Lindell91 journals/tods/Lipski79 journals/jacm/Lipski81 journals/tcs/LeratL86 journals/cj/LeveneL90 books/sp/Lloyd87 conf/pods/LakshmananM89 conf/tlca/LeivantM93 conf/sigmod/LaverMG83 conf/pods/LiptonN90 journals/jcss/LucchesiO78 conf/sigmod/Lohman88 ... conf/ijcai/Lozinskii85 books/ph/LewisP81 ... conf/sigmod/LecluseRV88 journals/is/LipeckS87 journals/jlp/LloydST87 journals/tods/LingTK81 conf/sigmod/LyngbaekV87 conf/dood/LefebvreV89 conf/pods/LibkinW93 conf/dbpl/LibkinW93 journals/jacm/Maier80 books/cs/Maier83 ... 
conf/vldb/Makinouchi77 conf/icalp/Makowsky81 ... conf/icdt/Malvestuto86 conf/aaai/MacGregorB92 journals/tods/MylopoulosBW80 conf/sigmod/McCarthyD89 journals/csur/MishraE92 conf/sigmod/MumickFPR90 books/mk/Minker88 journals/jlp/Minker88 conf/vldb/MillerIR93 journals/is/MillerIR94 journals/iandc/Mitchell83 conf/pods/Mitchell83 conf/vldb/MendelzonM79 journals/tods/MaierMS79 journals/jcss/MaierMSU80 conf/pods/MendelzonMW94 journals/debu/MorrisNSUG87 journals/ai/Moore85 conf/vldb/Morgenstern83 conf/pods/Morris88 ... conf/pods/MannilaR85 ... journals/jlp/MinkerR90 books/aw/MannilaR92 journals/acr/MaierRW86 ... journals/tods/MarkowitzS92 conf/pods/Marchetti-SpaccamelaPS87 journals/jacm/MaierSY81 conf/iclp/MorrisUG86 journals/tods/MaierUV84 conf/iclp/MorrisUG86 journals/acta/MakowskyV86 books/bc/MaierW88 books/mk/minker88/ManchandraW88 conf/pods/Naughton86 conf/sigmod/NgFS91 ... conf/vldb/Nejdl87 conf/adbt/NicolasM77 conf/sigmod/Nicolas78 journals/acta/Nicolas82 conf/ds/76 conf/pods/NaqviK88 journals/tods/NegriPS91 conf/vldb/NaughtonRSU89 conf/pods/NaughtonS87 ... ... conf/vldb/Osborn79 ... journals/tods/OzsoyogluY87 conf/adbt/Paige82 ... books/cs/Papadimitriou86 ... journals/ipl/Paredaens78 ... books/sp/ParedaensBGG89 journals/ai/Andersen91 books/el/leeuwen90/Perrin90 journals/ins/Petrov89 conf/pods/ParedaensG88 conf/pods/PatnaikI94 conf/adbt/ParedaensJ79 journals/csur/PeckhamM88 ... ... conf/sigmod/ParkerP80 ... conf/iclp/Przymusinski88 conf/pods/Przymusinski89 ... conf/vldb/ParkerSV92 conf/aaai/PearlV87 journals/ai/PereiraW80a conf/pods/PapadimitriouY92 journals/tkde/QianW91 ... journals/jlp/Ramakrishnan91 conf/pods/RamakrishnanBS87 ... conf/adbt/Reiter77 journals/ai/Reiter80 conf/db-workshops/Reiter82 journals/jacm/Reiter86 journals/tods/Rissanen77 conf/mfcs/Rissanen78 conf/pods/Rissanen82 ... journals/ngc/RohmerLK86 journals/jacm/Robinson65 ... conf/pods/Ross89 ... ... conf/sigmod/RoweS79 conf/sigmod/RichardsonS91 journals/debu/RamamohanaraoSBPNTZD87 conf/vldb/RamakrishnanSS92 conf/sigmod/RamakrishnanSSS93 conf/pods/RamakrishnanSUV89 journals/jcss/RamakrishnanSUV93 journals/jlp/RamakrishnanU95 conf/sigmod/SelingerACLP79 conf/sigmod/Sagiv81 journals/tods/Sagiv83 books/mk/minker88/Sagiv88 conf/slp/Sagiv90 conf/sigmod/Sciore81 journals/jacm/Sciore82 conf/pods/Sciore83 journals/acr/Sciore86 journals/jacm/SagivDPF81 conf/pods/X89 ... journals/ai/SmithG85 books/mk/minker88/Shepherdson88 journals/tods/Shipman81 conf/pods/Shmueli87 conf/iclp/SekiI88 conf/sigmod/ShmueliI84 journals/tc/Sickel76 journals/jsc/Siekmann89 conf/sigmod/StonebrakerJGP90 conf/vldb/SimonKM92 journals/csur/ShethL90 conf/pods/SeibL91 conf/sigmod/SuLRD93 conf/adbt/SilvaM79 journals/sigmod/Snodgrass90 journals/sigmod/Soo91 conf/pods/SuciuP94 conf/sigmod/StonebrakerR86 conf/slp/SudarshanR93 conf/pods/SagivS86 journals/cacm/Stonebraker81 books/mk/Stonebraker88 journals/tkde/Stonebraker92 books/aw/Stroustrup91 journals/jacm/SadriU82 conf/vldb/Su91 conf/pods/SagivV89 journals/jacm/SagivW82 journals/tods/StonebrakerWKH76 journals/jacm/SagivY80 conf/pods/SaccaZ86 journals/tcs/SaccaZ88 ... conf/pods/SaccaZ90 ... ... books/bc/TanselCGJSS93 ... journals/acr/ThomasF86 ... ... ... ... journals/tcs/Topor87 ... books/mk/minker88/ToporS88 ... journals/siamcomp/TarjanY84 journals/csur/TeoreyYF86 journals/algorithmica/UllmanG88 conf/pods/Ullman82 books/cs/Ullman82 journals/tods/Ullman85 books/cs/Ullman88 conf/pods/Ullman89 books/cs/Ullman89 conf/sigmod/Gelder86 ... 
conf/pods/BusscheG92 conf/focs/BusscheGAG92 conf/pods/BusscheP91 conf/slp/Gelder86 conf/pods/Gelder89 conf/pods/GelderRS88 journals/jacm/GelderRS91 journals/tods/GelderT91 journals/ipl/Vardi81 conf/stoc/Vardi82 conf/focs/Vardi82 journals/acta/Vardi83 journals/jcss/Vardi84 conf/pods/Vardi85 conf/pods/Vardi86 journals/jcss/Vardi86 ... conf/pods/Vardi88 conf/sigmod/Vassiliou79 ... ... journals/jacm/EmdenK76 conf/nf2/SchollABBGPRV87 journals/jacm/Vianu87 journals/acta/Vianu87 conf/eds/Vieille86 conf/iclp/Vieille87 ... conf/eds/Vieille88 journals/tcs/Vieille89 ... journals/tcs/VianuV92 conf/sigmod/WidomF90 conf/icde/WangH92 conf/pos/WidjojoHW90 journals/computer/Wiederhold92 conf/pods/Wilkins86 conf/pods/Winslett88 conf/sigmod/WolfsonO90 conf/pods/Wong93 conf/sigmod/WolfsonS88 journals/ibmrd/WangW75 journals/tods/WongY76 conf/vldb/Yannakakis81 journals/csur/YuC84 ... journals/jcss/YannakakisP82 ... journals/tods/Zaniolo82 journals/jcss/Zaniolo84 ... conf/edbt/ZhouH90 journals/ibmsj/Zloof77 books/mk/ZdonikM90 db/books/dbtext/abiteboul95.html" }
+{ "id": 72, "dblpid": "books/aw/Lamport86", "title": "LaTeX User's Guide & Reference Manual", "authors": "Leslie Lamport", "misc": "2002-01-03 Addison-Wesley 1986 0-201-15790-X" }
+{ "id": 73, "dblpid": "books/aw/AhoHU74", "title": "The Design and Analysis of Computer Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1974 0-201-00029-6" }
+{ "id": 74, "dblpid": "books/aw/Lamport2002", "title": "Specifying Systems, The TLA+ Language and Tools for Hardware and Software Engineers", "authors": "Leslie Lamport", "misc": "2005-07-28 Addison-Wesley 2002 0-3211-4306-X http //research.microsoft.com/users/lamport/tla/book.html" }
+{ "id": 75, "dblpid": "books/aw/AhoHU83", "title": "Data Structures and Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1983 0-201-00023-7" }
+{ "id": 76, "dblpid": "books/aw/LewisBK01", "title": "Databases and Transaction Processing An Application-Oriented Approach", "authors": "Philip M. Lewis Arthur J. Bernstein Michael Kifer", "misc": "2002-01-03 Addison-Wesley 2001 0-201-70872-8" }
+{ "id": 77, "dblpid": "books/aw/AhoKW88", "title": "The AWK Programming Language", "authors": "Alfred V. Aho Brian W. Kernighan Peter J. Weinberger", "misc": "2002-01-03 Addison-Wesley 1988" }
+{ "id": 78, "dblpid": "books/aw/LindholmY97", "title": "The Java Virtual Machine Specification", "authors": "Tim Lindholm Frank Yellin", "misc": "2002-01-28 Addison-Wesley 1997 0-201-63452-X" }
+{ "id": 79, "dblpid": "books/aw/AhoSU86", "title": "Compilers Princiles, Techniques, and Tools.", "authors": "Alfred V. Aho Ravi Sethi Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1986 0-201-10088-6" }
+{ "id": 80, "dblpid": "books/aw/Sedgewick83", "title": "Algorithms", "authors": "Robert Sedgewick", "misc": "2002-01-03 Addison-Wesley 1983 0-201-06672-6" }
+{ "id": 81, "dblpid": "journals/siamcomp/AspnesW96", "title": "Randomized Consensus in Expected O(n log² n) Operations Per Processor.", "authors": "James Aspnes Orli Waarts", "misc": "2002-01-03 1024-1044 1996 25 SIAM J. Comput. 5 db/journals/siamcomp/siamcomp25.html#AspnesW96" }
+{ "id": 82, "dblpid": "conf/focs/AspnesW92", "title": "Randomized Consensus in Expected O(n log ^2 n) Operations Per Processor", "authors": "James Aspnes Orli Waarts", "misc": "2006-04-25 137-146 conf/focs/FOCS33 1992 FOCS db/conf/focs/focs92.html#AspnesW92" }
+{ "id": 83, "dblpid": "journals/siamcomp/Bloniarz83", "title": "A Shortest-Path Algorithm with Expected Time O(n² log n log* n).", "authors": "Peter A. Bloniarz", "misc": "2002-01-03 588-600 1983 12 SIAM J. Comput. 3 db/journals/siamcomp/siamcomp12.html#Bloniarz83" }
+{ "id": 84, "dblpid": "conf/stoc/Bloniarz80", "title": "A Shortest-Path Algorithm with Expected Time O(n^2 log n log ^* n)", "authors": "Peter A. Bloniarz", "misc": "2006-04-25 378-384 conf/stoc/STOC12 1980 STOC db/conf/stoc/stoc80.html#Bloniarz80" }
+{ "id": 85, "dblpid": "journals/siamcomp/Megiddo83a", "title": "Linear-Time Algorithms for Linear Programming in R³ and Related Problems.", "authors": "Nimrod Megiddo", "misc": "2002-01-03 759-776 1983 12 SIAM J. Comput. 4 db/journals/siamcomp/siamcomp12.html#Megiddo83a" }
+{ "id": 86, "dblpid": "conf/focs/Megiddo82", "title": "Linear-Time Algorithms for Linear Programming in R^3 and Related Problems", "authors": "Nimrod Megiddo", "misc": "2006-04-25 329-338 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#Megiddo82" }
+{ "id": 87, "dblpid": "journals/siamcomp/MoffatT87", "title": "An All Pairs Shortest Path Algorithm with Expected Time O(n² log n).", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2002-01-03 1023-1031 1987 16 SIAM J. Comput. 6 db/journals/siamcomp/siamcomp16.html#MoffatT87" }
+{ "id": 88, "dblpid": "conf/focs/MoffatT85", "title": "An All Pairs Shortest Path Algorithm with Expected Running Time O(n^2 log n)", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2006-04-25 101-105 conf/focs/FOCS26 1985 FOCS db/conf/focs/focs85.html#MoffatT85" }
+{ "id": 89, "dblpid": "conf/icip/SchonfeldL98", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-11-05 123-127 1998 ICIP (3) db/conf/icip/icip1998-3.html#SchonfeldL98" }
+{ "id": 90, "dblpid": "conf/hicss/SchonfeldL99", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases ¾ Visual Search Engine.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-01-03 1999 HICSS http //computer.org/proceedings/hicss/0001/00013/00013006abs.htm db/conf/hicss/hicss1999-3.html#SchonfeldL99" }
+{ "id": 91, "dblpid": "journals/corr/abs-0802-2861", "title": "Geometric Set Cover and Hitting Sets for Polytopes in $R^3$", "authors": "Sören Laue", "misc": "2008-03-03 http //arxiv.org/abs/0802.2861 2008 CoRR abs/0802.2861 db/journals/corr/corr0802.html#abs-0802-2861 informal publication" }
+{ "id": 92, "dblpid": "conf/stacs/Laue08", "title": "Geometric Set Cover and Hitting Sets for Polytopes in R³.", "authors": "Sören Laue", "misc": "2008-03-04 2008 STACS 479-490 http //drops.dagstuhl.de/opus/volltexte/2008/1367 conf/stacs/2008 db/conf/stacs/stacs2008.html#Laue08" }
+{ "id": 93, "dblpid": "journals/iandc/IbarraJCR91", "title": "Some Classes of Languages in NC¹", "authors": "Oscar H. Ibarra Tao Jiang Jik H. Chang Bala Ravikumar", "misc": "2006-04-25 86-106 Inf. Comput. January 1991 90 1 db/journals/iandc/iandc90.html#IbarraJCR91" }
+{ "id": 94, "dblpid": "conf/awoc/IbarraJRC88", "title": "On Some Languages in NC.", "authors": "Oscar H. Ibarra Tao Jiang Bala Ravikumar Jik H. Chang", "misc": "2002-08-06 64-73 1988 conf/awoc/1988 AWOC db/conf/awoc/awoc88.html#IbarraJRC88" }
+{ "id": 95, "dblpid": "journals/jacm/GalilHLSW87", "title": "An O(n³log n) deterministic and an O(n³) Las Vegs isomorphism test for trivalent graphs.", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2003-11-20 513-531 1987 34 J. ACM 3 http //doi.acm.org/10.1145/28869.28870 db/journals/jacm/jacm34.html#GalilHLSW87" }
+{ "id": 96, "dblpid": "conf/focs/GalilHLSW82", "title": "An O(n^3 log n) Deterministic and an O(n^3) Probabilistic Isomorphism Test for Trivalent Graphs", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2006-04-25 118-125 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#GalilHLSW82" }
+{ "id": 97, "dblpid": "journals/jacm/GalilT88", "title": "An O(n²(m + n log n)log n) min-cost flow algorithm.", "authors": "Zvi Galil Éva Tardos", "misc": "2003-11-20 374-386 1988 35 J. ACM 2 http //doi.acm.org/10.1145/42282.214090 db/journals/jacm/jacm35.html#GalilT88" }
+{ "id": 98, "dblpid": "conf/focs/GalilT86", "title": "An O(n^2 (m + n log n) log n) Min-Cost Flow Algorithm", "authors": "Zvi Galil Éva Tardos", "misc": "2006-04-25 1-9 conf/focs/FOCS27 1986 FOCS db/conf/focs/focs86.html#GalilT86" }
+{ "id": 99, "dblpid": "series/synthesis/2009Weintraub", "title": "Jordan Canonical Form Theory and Practice", "authors": "Steven H. Weintraub", "misc": "2009-09-06 Jordan Canonical Form Theory and Practice http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
+{ "id": 100, "dblpid": "series/synthesis/2009Brozos", "title": "The Geometry of Walker Manifolds", "authors": "Miguel Brozos-Vázquez Eduardo García-Río Peter Gilkey Stana Nikcevic Rámon Vázquez-Lorenzo", "misc": "2009-09-06 The Geometry of Walker Manifolds http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
diff --git a/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_2.adm b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_2.adm
new file mode 100644
index 0000000..a7ec8f6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/scan/issue238_query_2.adm
@@ -0,0 +1,100 @@
+{ "id": 1, "dblpid": "books/acm/kim95/AnnevelinkACFHK95", "title": "Object SQL - A Language for the Design and Implementation of Object Databases.", "authors": "Jurgen Annevelink Rafiul Ahad Amelia Carlson Daniel H. Fishman Michael L. Heytens William Kent", "misc": "2002-01-03 42-68 1995 Modern Database Systems db/books/collections/kim95.html#AnnevelinkACFHK95" }
+{ "id": 2, "dblpid": "books/acm/kim95/Blakeley95", "title": "OQL[C++] Extending C++ with an Object Query Capability.", "authors": "José A. Blakeley", "misc": "2002-01-03 69-88 Modern Database Systems db/books/collections/kim95.html#Blakeley95 1995" }
+{ "id": 3, "dblpid": "books/acm/kim95/BreitbartGS95", "title": "Transaction Management in Multidatabase Systems.", "authors": "Yuri Breitbart Hector Garcia-Molina Abraham Silberschatz", "misc": "2004-03-08 573-591 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartGS95 1995" }
+{ "id": 4, "dblpid": "books/acm/kim95/ChristodoulakisK95", "title": "Multimedia Information Systems Issues and Approaches.", "authors": "Stavros Christodoulakis Leonidas Koveos", "misc": "2002-01-03 318-337 1995 Modern Database Systems db/books/collections/kim95.html#ChristodoulakisK95" }
+{ "id": 5, "dblpid": "books/acm/kim95/DayalHW95", "title": "Active Database Systems.", "authors": "Umeshwar Dayal Eric N. Hanson Jennifer Widom", "misc": "2002-01-03 434-456 1995 Modern Database Systems db/books/collections/kim95.html#DayalHW95" }
+{ "id": 6, "dblpid": "books/acm/kim95/DittrichD95", "title": "Where Object-Oriented DBMSs Should Do Better A Critique Based on Early Experiences.", "authors": "Angelika Kotz Dittrich Klaus R. Dittrich", "misc": "2002-01-03 238-254 1995 Modern Database Systems db/books/collections/kim95.html#DittrichD95" }
+{ "id": 7, "dblpid": "books/acm/kim95/Garcia-MolinaH95", "title": "Distributed Databases.", "authors": "Hector Garcia-Molina Meichun Hsu", "misc": "2002-01-03 477-493 1995 Modern Database Systems db/books/collections/kim95.html#Garcia-MolinaH95" }
+{ "id": 8, "dblpid": "books/acm/kim95/Goodman95", "title": "An Object-Oriented DBMS War Story Developing a Genome Mapping Database in C++.", "authors": "Nathan Goodman", "misc": "2002-01-03 216-237 1995 Modern Database Systems db/books/collections/kim95.html#Goodman95" }
+{ "id": 9, "dblpid": "books/acm/kim95/Kaiser95", "title": "Cooperative Transactions for Multiuser Environments.", "authors": "Gail E. Kaiser", "misc": "2002-01-03 409-433 1995 Modern Database Systems db/books/collections/kim95.html#Kaiser95" }
+{ "id": 10, "dblpid": "books/acm/kim95/KelleyGKRG95", "title": "Schema Architecture of the UniSQL/M Multidatabase System", "authors": "William Kelley Sunit K. Gala Won Kim Tom C. Reyes Bruce Graham", "misc": "2004-03-08 Modern Database Systems books/acm/Kim95 621-648 1995 db/books/collections/kim95.html#KelleyGKRG95" }
+{ "id": 11, "dblpid": "books/acm/kim95/KemperM95", "title": "Physical Object Management.", "authors": "Alfons Kemper Guido Moerkotte", "misc": "2002-01-03 175-202 1995 Modern Database Systems db/books/collections/kim95.html#KemperM95" }
+{ "id": 12, "dblpid": "books/acm/kim95/Kim95", "title": "Introduction to Part 1 Next-Generation Database Technology.", "authors": "Won Kim", "misc": "2002-01-03 5-17 1995 Modern Database Systems db/books/collections/kim95.html#Kim95" }
+{ "id": 13, "dblpid": "books/acm/kim95/Kim95a", "title": "Object-Oriented Database Systems Promises, Reality, and Future.", "authors": "Won Kim", "misc": "2002-01-03 255-280 1995 Modern Database Systems db/books/collections/kim95.html#Kim95a" }
+{ "id": 14, "dblpid": "books/acm/kim95/Kim95b", "title": "Introduction to Part 2 Technology for Interoperating Legacy Databases.", "authors": "Won Kim", "misc": "2002-01-03 515-520 1995 Modern Database Systems db/books/collections/kim95.html#Kim95b" }
+{ "id": 15, "dblpid": "books/acm/kim95/KimCGS95", "title": "On Resolving Schematic Heterogeneity in Multidatabase Systems.", "authors": "Won Kim Injun Choi Sunit K. Gala Mark Scheevel", "misc": "2002-01-03 521-550 1995 Modern Database Systems db/books/collections/kim95.html#KimCGS95" }
+{ "id": 16, "dblpid": "books/acm/kim95/KimG95", "title": "Requirements for a Performance Benchmark for Object-Oriented Database Systems.", "authors": "Won Kim Jorge F. Garza", "misc": "2002-01-03 203-215 1995 Modern Database Systems db/books/collections/kim95.html#KimG95" }
+{ "id": 17, "dblpid": "books/acm/kim95/KimK95", "title": "On View Support in Object-Oriented Databases Systems.", "authors": "Won Kim William Kelley", "misc": "2002-01-03 108-129 1995 Modern Database Systems db/books/collections/kim95.html#KimK95" }
+{ "id": 18, "dblpid": "books/acm/kim95/Kowalski95", "title": "The POSC Solution to Managing E&P Data.", "authors": "Vincent J. Kowalski", "misc": "2002-01-03 281-301 1995 Modern Database Systems db/books/collections/kim95.html#Kowalski95" }
+{ "id": 19, "dblpid": "books/acm/kim95/KriegerA95", "title": "C++ Bindings to an Object Database.", "authors": "David Krieger Tim Andrews", "misc": "2002-01-03 89-107 1995 Modern Database Systems db/books/collections/kim95.html#KriegerA95" }
+{ "id": 20, "dblpid": "books/acm/kim95/Lunt95", "title": "Authorization in Object-Oriented Databases.", "authors": "Teresa F. Lunt", "misc": "2002-01-03 130-145 1995 Modern Database Systems db/books/collections/kim95.html#Lunt95" }
+{ "id": 21, "dblpid": "books/acm/kim95/MengY95", "title": "Query Processing in Multidatabase Systems.", "authors": "Weiyi Meng Clement T. Yu", "misc": "2002-01-03 551-572 1995 Modern Database Systems db/books/collections/kim95.html#MengY95" }
+{ "id": 22, "dblpid": "books/acm/kim95/Motro95", "title": "Management of Uncerainty in database Systems.", "authors": "Amihai Motro", "misc": "2002-01-03 457-476 1995 Modern Database Systems db/books/collections/kim95.html#Motro95" }
+{ "id": 23, "dblpid": "books/acm/kim95/Omiecinski95", "title": "Parallel Relational Database Systems.", "authors": "Edward Omiecinski", "misc": "2002-01-03 494-512 1995 Modern Database Systems db/books/collections/kim95.html#Omiecinski95" }
+{ "id": 24, "dblpid": "books/acm/kim95/OzsuB95", "title": "Query Processing in Object-Oriented Database Systems.", "authors": "M. Tamer Özsu José A. Blakeley", "misc": "2002-01-03 146-174 1995 Modern Database Systems db/books/collections/kim95.html#OzsuB95" }
+{ "id": 25, "dblpid": "books/acm/kim95/RusinkiewiczS95", "title": "Specification and Execution of Transactional Workflows.", "authors": "Marek Rusinkiewicz Amit P. Sheth", "misc": "2004-03-08 592-620 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#RusinkiewiczS95 1995" }
+{ "id": 26, "dblpid": "books/acm/kim95/Samet95", "title": "Spatial Data Structures.", "authors": "Hanan Samet", "misc": "2004-03-08 361-385 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Samet95 1995" }
+{ "id": 27, "dblpid": "books/acm/kim95/SametA95", "title": "Spatial Data Models and Query Processing.", "authors": "Hanan Samet Walid G. Aref", "misc": "2002-01-03 338-360 1995 Modern Database Systems db/books/collections/kim95.html#SametA95" }
+{ "id": 28, "dblpid": "books/acm/kim95/ShanADDK95", "title": "Pegasus A Heterogeneous Information Management System.", "authors": "Ming-Chien Shan Rafi Ahmed Jim Davis Weimin Du William Kent", "misc": "2004-03-08 664-682 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#ShanADDK95 1995" }
+{ "id": 29, "dblpid": "books/acm/kim95/Snodgrass95", "title": "Temporal Object-Oriented Databases A Critical Comparison.", "authors": "Richard T. Snodgrass", "misc": "2002-01-03 386-408 1995 Modern Database Systems db/books/collections/kim95.html#Snodgrass95" }
+{ "id": 30, "dblpid": "books/acm/kim95/SoleyK95", "title": "The OMG Object Model.", "authors": "Richard Mark Soley William Kent", "misc": "2002-01-03 18-41 1995 Modern Database Systems db/books/collections/kim95.html#SoleyK95" }
+{ "id": 31, "dblpid": "books/acm/kim95/Stout95", "title": "EDA/SQL.", "authors": "Ralph L. Stout", "misc": "2004-03-08 649-663 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#Stout95 1995" }
+{ "id": 32, "dblpid": "books/acm/kim95/Thompson95", "title": "The Changing Database Standards Landscape.", "authors": "Craig W. Thompson", "misc": "2002-01-03 302-317 1995 Modern Database Systems db/books/collections/kim95.html#Thompson95" }
+{ "id": 33, "dblpid": "books/acm/kim95/BreitbartR95", "title": "Overview of the ADDS System.", "authors": "Yuri Breitbart Tom C. Reyes", "misc": "2009-06-12 683-701 Modern Database Systems books/acm/Kim95 db/books/collections/kim95.html#BreitbartR95 1995" }
+{ "id": 34, "dblpid": "books/acm/Kim95", "title": "Modern Database Systems The Object Model, Interoperability, and Beyond.", "authors": "", "misc": "2004-03-08 Won Kim Modern Database Systems ACM Press and Addison-Wesley 1995 0-201-59098-0 db/books/collections/kim95.html" }
+{ "id": 35, "dblpid": "books/ap/MarshallO79", "title": "Inequalities Theory of Majorization and Its Application.", "authors": "Albert W. Marshall Ingram Olkin", "misc": "2002-01-03 Academic Press 1979 0-12-473750-1" }
+{ "id": 36, "dblpid": "books/aw/kimL89/BjornerstedtH89", "title": "Version Control in an Object-Oriented Architecture.", "authors": "Anders Björnerstedt Christer Hulten", "misc": "2006-02-24 451-485 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BjornerstedtH89" }
+{ "id": 37, "dblpid": "books/aw/kimL89/BretlMOPSSWW89", "title": "The GemStone Data Management System.", "authors": "Robert Bretl David Maier Allen Otis D. Jason Penney Bruce Schuchardt Jacob Stein E. Harold Williams Monty Williams", "misc": "2002-01-03 283-308 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#BretlMOPSSWW89" }
+{ "id": 38, "dblpid": "books/aw/kimL89/CareyDRS89", "title": "Storage Management in EXODUS.", "authors": "Michael J. Carey David J. DeWitt Joel E. Richardson Eugene J. Shekita", "misc": "2002-01-03 341-369 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#CareyDRS89" }
+{ "id": 39, "dblpid": "books/aw/kimL89/Decouchant89", "title": "A Distributed Object Manager for the Smalltalk-80 System.", "authors": "Dominique Decouchant", "misc": "2002-01-03 487-520 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Decouchant89" }
+{ "id": 40, "dblpid": "books/aw/kimL89/DiederichM89", "title": "Objects, Messages, and Rules in Database Design.", "authors": "Jim Diederich Jack Milton", "misc": "2002-01-03 177-197 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#DiederichM89" }
+{ "id": 41, "dblpid": "books/aw/kimL89/EllisG89", "title": "Active Objects Ealities and Possibilities.", "authors": "Clarence A. Ellis Simon J. Gibbs", "misc": "2002-01-03 561-572 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#EllisG89" }
+{ "id": 42, "dblpid": "books/aw/kimL89/FishmanABCCDHHKLLMNRSW89", "title": "Overview of the Iris DBMS.", "authors": "Daniel H. Fishman Jurgen Annevelink David Beech E. C. Chow Tim Connors J. W. Davis Waqar Hasan C. G. Hoch William Kent S. Leichner Peter Lyngbæk Brom Mahbod Marie-Anne Neimat Tore Risch Ming-Chien Shan W. Kevin Wilkinson", "misc": "2002-01-03 219-250 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#FishmanABCCDHHKLLMNRSW89" }
+{ "id": 43, "dblpid": "books/aw/kimL89/KimBCGW89", "title": "Features of the ORION Object-Oriented Database System.", "authors": "Won Kim Nat Ballou Hong-Tai Chou Jorge F. Garza Darrell Woelk", "misc": "2002-01-03 251-282 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimBCGW89" }
+{ "id": 44, "dblpid": "books/aw/kimL89/KimKD89", "title": "Indexing Techniques for Object-Oriented Databases.", "authors": "Won Kim Kyung-Chang Kim Alfred G. Dale", "misc": "2002-01-03 371-394 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#KimKD89" }
+{ "id": 45, "dblpid": "books/aw/kimL89/King89", "title": "My Cat Is Object-Oriented.", "authors": "Roger King", "misc": "2002-01-03 23-30 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#King89" }
+{ "id": 46, "dblpid": "books/aw/kimL89/Maier89", "title": "Making Database Systems Fast Enough for CAD Applications.", "authors": "David Maier", "misc": "2002-01-03 573-582 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Maier89" }
+{ "id": 47, "dblpid": "books/aw/kimL89/MellenderRS89", "title": "Optimizing Smalltalk Message Performance.", "authors": "Fred Mellender Steve Riegel Andrew Straw", "misc": "2002-01-03 423-450 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#MellenderRS89" }
+{ "id": 48, "dblpid": "books/aw/kimL89/Moon89", "title": "The Common List Object-Oriented Programming Language Standard.", "authors": "David A. Moon", "misc": "2002-01-03 49-78 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moon89" }
+{ "id": 49, "dblpid": "books/aw/kimL89/Moss89", "title": "Object Orientation as Catalyst for Language-Database Inegration.", "authors": "J. Eliot B. Moss", "misc": "2002-01-03 583-592 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Moss89" }
+{ "id": 50, "dblpid": "books/aw/kimL89/Nierstrasz89", "title": "A Survey of Object-Oriented Concepts.", "authors": "Oscar Nierstrasz", "misc": "2002-01-03 3-21 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Nierstrasz89" }
+{ "id": 51, "dblpid": "books/aw/kimL89/NierstraszT89", "title": "Integrated Office Systems.", "authors": "Oscar Nierstrasz Dennis Tsichritzis", "misc": "2002-01-03 199-215 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#NierstraszT89" }
+{ "id": 52, "dblpid": "books/aw/kimL89/Russinoff89", "title": "Proteus A Frame-Based Nonmonotonic Inference System.", "authors": "David M. Russinoff", "misc": "2002-01-03 127-150 Object-Oriented Concepts, Databases, and Applications ACM Press and Addison-Wesley 1989 db/books/collections/kim89.html#Russinoff89" }
+{ "id": 53, "dblpid": "books/aw/kimL89/SkarraZ89", "title": "Concurrency Control and Object-Oriented Databases.", "authors": "Andrea H. Skarra Stanley B. Zdonik", "misc": "2002-01-03 395-421 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SkarraZ89" }
+{ "id": 54, "dblpid": "books/aw/kimL89/SteinLU89", "title": "A Shared View of Sharing The Treaty of Orlando.", "authors": "Lynn Andrea Stein Henry Lieberman David Ungar", "misc": "2002-01-03 31-48 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#SteinLU89" }
+{ "id": 55, "dblpid": "books/aw/kimL89/TarltonT89", "title": "Pogo A Declarative Representation System for Graphics.", "authors": "Mark A. Tarlton P. Nong Tarlton", "misc": "2002-01-03 151-176 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TarltonT89" }
+{ "id": 56, "dblpid": "books/aw/kimL89/TomlinsonS89", "title": "Concurrent Object-Oriented Programming Languages.", "authors": "Chris Tomlinson Mark Scheevel", "misc": "2002-01-03 79-124 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TomlinsonS89" }
+{ "id": 57, "dblpid": "books/aw/kimL89/TsichritzisN89", "title": "Directions in Object-Oriented Research.", "authors": "Dennis Tsichritzis Oscar Nierstrasz", "misc": "2002-01-03 523-536 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#TsichritzisN89" }
+{ "id": 58, "dblpid": "books/aw/kimL89/Wand89", "title": "A Proposal for a Formal Model of Objects.", "authors": "Yair Wand", "misc": "2002-01-03 537-559 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#Wand89" }
+{ "id": 59, "dblpid": "books/aw/kimL89/WeiserL89", "title": "OZ+ An Object-Oriented Database System.", "authors": "Stephen P. Weiser Frederick H. Lochovsky", "misc": "2002-01-03 309-337 1989 Object-Oriented Concepts, Databases, and Applications db/books/collections/kim89.html#WeiserL89" }
+{ "id": 60, "dblpid": "books/aw/stonebraker86/RoweS86", "title": "The Commercial INGRES Epilogue.", "authors": "Lawrence A. Rowe Michael Stonebraker", "misc": "2002-01-03 63-82 1986 The INGRES Papers db/books/collections/Stonebraker86.html#RoweS86 db/books/collections/Stonebraker86/RoweS86.html ingres/P063.pdf" }
+{ "id": 61, "dblpid": "books/aw/stonebraker86/Stonebraker86", "title": "Design of Relational Systems (Introduction to Section 1).", "authors": "Michael Stonebraker", "misc": "2002-01-03 1-3 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86 db/books/collections/Stonebraker86/Stonebraker86.html ingres/P001.pdf" }
+{ "id": 62, "dblpid": "books/aw/stonebraker86/Stonebraker86a", "title": "Supporting Studies on Relational Systems (Introduction to Section 2).", "authors": "Michael Stonebraker", "misc": "2002-01-03 83-85 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86a db/books/collections/Stonebraker86/Stonebraker86a.html ingres/P083.pdf" }
+{ "id": 63, "dblpid": "books/aw/stonebraker86/Stonebraker86b", "title": "Distributed Database Systems (Introduction to Section 3).", "authors": "Michael Stonebraker", "misc": "2002-01-03 183-186 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86b db/books/collections/Stonebraker86/Stonebraker86b.html ingres/P183.pdf" }
+{ "id": 64, "dblpid": "books/aw/stonebraker86/Stonebraker86c", "title": "The Design and Implementation of Distributed INGRES.", "authors": "Michael Stonebraker", "misc": "2002-01-03 187-196 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86c db/books/collections/Stonebraker86/Stonebraker86c.html ingres/P187.pdf" }
+{ "id": 65, "dblpid": "books/aw/stonebraker86/Stonebraker86d", "title": "User Interfaces for Database Systems (Introduction to Section 4).", "authors": "Michael Stonebraker", "misc": "2002-01-03 243-245 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86d db/books/collections/Stonebraker86/Stonebraker86d.html ingres/P243.pdf" }
+{ "id": 66, "dblpid": "books/aw/stonebraker86/Stonebraker86e", "title": "Extended Semantics for the Relational Model (Introduction to Section 5).", "authors": "Michael Stonebraker", "misc": "2002-01-03 313-316 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86e db/books/collections/Stonebraker86/Stonebraker86e.html ingres/P313.pdf" }
+{ "id": 67, "dblpid": "books/aw/stonebraker86/Stonebraker86f", "title": "Database Design (Introduction to Section 6).", "authors": "Michael Stonebraker", "misc": "2002-01-03 393-394 1986 The INGRES Papers db/books/collections/Stonebraker86.html#Stonebraker86f db/books/collections/Stonebraker86/Stonebraker86f.html ingres/P393.pdf" }
+{ "id": 68, "dblpid": "books/aw/stonebraker86/X86", "title": "Title, Preface, Contents.", "authors": "", "misc": "2002-01-03 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86 db/books/collections/Stonebraker86/X86.html ingres/frontmatter.pdf" }
+{ "id": 69, "dblpid": "books/aw/stonebraker86/X86a", "title": "References.", "authors": "", "misc": "2002-01-03 429-444 1986 The INGRES Papers db/books/collections/Stonebraker86.html#X86a db/books/collections/Stonebraker86/X86a.html ingres/P429.pdf" }
+{ "id": 70, "dblpid": "books/aw/Knuth86a", "title": "TeX The Program", "authors": "Donald E. Knuth", "misc": "2002-01-03 Addison-Wesley 1986 0-201-13437-3" }
+{ "id": 71, "dblpid": "books/aw/AbiteboulHV95", "title": "Foundations of Databases.", "authors": "Serge Abiteboul Richard Hull Victor Vianu", "misc": "2002-01-03 Addison-Wesley 1995 0-201-53771-0 AHV/Toc.pdf ... ... journals/tods/AstrahanBCEGGKLMMPTWW76 books/bc/AtzeniA93 journals/tcs/AtzeniABM82 journals/jcss/AbiteboulB86 journals/csur/AtkinsonB87 conf/pods/AtzeniB87 journals/vldb/AbiteboulB95 conf/sigmod/AbiteboulB91 conf/dood/AtkinsonBDDMZ89 conf/vldb/AlbanoBGO93 ... conf/icdt/Abiteboul88 journals/ipl/Abiteboul89 conf/ds/Abrial74 journals/tods/AhoBU79 books/mk/minker88/AptBW88 conf/vldb/AroraC78 conf/stoc/AfratiC89 journals/tods/AlbanoCO85 conf/pods/AfratiCY91 conf/pods/AusielloDM85 conf/vldb/AbiteboulG85 journals/jacm/AjtaiG87 conf/focs/AjtaiG89 journals/tods/AbiteboulG91 ... ... journals/tods/AbiteboulH87 conf/sigmod/AbiteboulH88 ... conf/sigmod/AbiteboulK89 journals/tcs/AbiteboulKG91 journals/jcss/AbiteboulKRW95 conf/sigmod/AbiteboulLUW93 conf/pods/AtzeniP82 conf/pods/AfratiP87 conf/pods/AptP87 conf/wg/AndriesP91 conf/pods/AfratiPPRSU86 books/el/leeuwen90/Apt90 conf/ifip/Armstrong74 journals/siamcomp/AhoSSU81 journals/tods/AhoSU79 journals/siamcomp/AhoSU79 conf/pods/AbiteboulSV90 journals/is/AtzeniT93 conf/popl/AhoU79 conf/pods/AbiteboulV87 conf/jcdkb/AbiteboulV88 journals/jacm/AbiteboulV88 conf/pods/AbiteboulV88 journals/jacm/AbiteboulV89 journals/jcss/AbiteboulV90 journals/jcss/AbiteboulV91 conf/stoc/AbiteboulV91 journals/amai/AbiteboulV91 journals/jcss/AbiteboulV95 journals/jacm/AptE82 conf/coco/AbiteboulVV92 conf/iclp/AptB88 conf/oopsla/BobrowKKMSZ86 journals/tse/BatoryBGSTTW88 conf/mfcs/Bancilhon78 ... conf/db-workshops/Bancilhon85 books/el/leeuwen90/Barendregt90 ... journals/tods/BeeriB79 books/el/leeuwen90/BerstelB90 conf/icdt/BeneventanoB92 conf/vldb/BernsteinBC80 conf/vldb/BeeriBG78 conf/sigmod/BorgidaBMR89 journals/tods/BunemanC79 journals/jacm/BernsteinC81 conf/dbpl/BancilhonCD89 books/bc/tanselCGSS93/BaudinetCW93 conf/sigmod/BiskupDB79 journals/jacm/BeeriDFS84 books/mk/BancilhonDK92 conf/edbt/BryDM88 conf/pods/BunemanDW88 journals/jcss/BunemanDW91 journals/tods/Beeri80 journals/dke/Beeri90 ... journals/tods/Bernstein76 conf/lics/BidoitF87 journals/iandc/BidoitF91 conf/sigmod/BeeriFH77 conf/stoc/BeeriFMMUY81 journals/jacm/BeeriFMY83 journals/tods/BunemanFN82 journals/siamcomp/BernsteinG81 journals/iandc/BlassGK85 conf/ijcai/BrachmanGL85 journals/tods/BernsteinGWRR81 books/aw/BernsteinHG87 ... journals/tcs/Bidoit91 journals/tcs/Biskup80 conf/adbt/Biskup79 journals/tods/Biskup83 journals/tcs/BunemanJO91 journals/tods/BeeriK86 conf/pods/BeeriKBR87 conf/icdt/BidoitL90 journals/csur/BatiniL86 conf/sigmod/BlakeleyLT86 conf/vldb/BeeriM91 conf/sigmod/BlakeleyMG93 journals/siamcomp/BeeriMSU81 conf/pods/BancilhonMSU86 conf/pods/BeeriNRST87 journals/software/Borgida85 conf/icalp/BraP83 conf/fgcs/BalbinMR88 ... conf/pods/BeeriR87 journals/jlp/BalbinR87 conf/sigmod/BancilhonR86 books/mk/minker88/BancilhonR88 journals/jlp/BeeriR91 conf/vldb/BancilhonRS82 conf/pods/BeeriRSS92 conf/dood/Bry89 journals/tods/BancilhonS81 journals/cogsci/BrachmanS85 journals/tods/BergamaschiS92 conf/sigmod/BernsteinST75 conf/dbpl/TannenBN91 conf/icdt/TannenBW92 ... journals/jacm/BeeriV84 conf/icalp/BeeriV81 conf/adbt/BeeriV79 journals/siamcomp/BeeriV84 journals/iandc/BeeriV84 journals/jacm/BeeriV84 journals/tcs/BeeriV85 journals/ibmrd/ChamberlinAEGLMRW76 ... 
journals/iandc/Cardelli88 books/mk/Cattell94 conf/sigmod/CacaceCCTZ90 conf/vldb/CastilhoCF82 conf/adbt/CasanovaF82 conf/focs/CaiFI89 journals/jcss/CasanovaFP84 conf/stoc/CosmadakisGKV88 conf/dood/CorciuloGP93 books/sp/CeriGT90 conf/focs/ChandraH80 journals/jcss/ChandraH80 journals/jcss/ChandraH82 journals/jlp/ChandraH85 conf/popl/Chandra81 conf/adbt/Chang79 conf/pods/Chandra88 ... journals/tods/Chen76 conf/ride/ChenHM94 conf/icde/Chomicki92 conf/pods/Chomicki92 ... ... ... conf/stoc/CosmadakisK85 journals/acr/CosmadakisK86 ... journals/jcss/CosmadakisKS86 journals/jacm/CosmadakisKV90 ... conf/pods/CalvaneseL94 conf/adbt/Clark77 conf/stoc/ChandraLM81 conf/stoc/ChandraM77 conf/pods/ConsensM90 conf/sigmod/ConsensM93 conf/icdt/ConsensM90 journals/cacm/Codd70 conf/sigmod/Codd71a persons/Codd71a persons/Codd72 conf/ifip/Codd74 ... conf/sigmod/Codd79 journals/cacm/Codd82 ... conf/sigmod/Cohen89 journals/cacm/Cohen90 ... journals/jcss/Cook74 conf/pods/Cosmadakis83 conf/focs/Cosmadakis87 books/el/leeuwen90/Courcelle90a journals/jacm/CosmadakisP84 conf/edbt/CeriCGLLTZ88 ... conf/vldb/CeriT87 conf/vldb/CasanovaTF88 ... conf/pods/CasanovaV83 journals/siamcomp/ChandraV85 conf/pods/ChaudhuriV92 conf/pods/ChaudhuriV93 conf/pods/ChaudhuriV94 journals/csur/CardelliW85 conf/pods/ChenW89 conf/pods/CohenW89 conf/vldb/CeriW90 conf/vldb/CeriW91 conf/iclp/ChenW92 conf/vldb/CeriW93 ... conf/birthday/Dahlhaus87 conf/vldb/Date81 books/aw/Date86 ... conf/dbpl/Dayal89 journals/tods/DayalB82 journals/ibmrd/DelobelC73 conf/icde/DelcambreD89 ... journals/tods/Delobel78 journals/jacm/Demolombe92 journals/tods/DateF92 ... conf/vldb/DayalHL91 journals/jacm/Paola69a conf/caap/DahlhausM86 journals/acr/DAtriM86 journals/iandc/DahlhausM92 conf/sigmod/DerrMP93 conf/vldb/MaindrevilleS88 conf/pods/Dong92 conf/adbt/BraP82 ... conf/dbpl/DongS91 journals/iandc/DongS95 conf/dbpl/DongS93 conf/dbpl/DongS93 conf/icdt/DongT92 conf/vldb/DenninghoffV91 conf/pods/DenninghoffV93 ... ... books/acm/kim95/DayalHW95 ... conf/pods/EiterGM94 conf/pods/Escobar-MolanoHJ93 ... books/el/leeuwen90/Emerson90 books/bc/ElmasriN89 ... conf/icse/Eswaran76 conf/sigmod/EpsteinSW78 ... ... conf/vldb/Fagin77 journals/tods/Fagin77 conf/sigmod/Fagin79 journals/tods/Fagin81 journals/ipl/FaginV83 journals/jacm/Fagin82 journals/jacm/Fagin83 journals/tcs/Fagin93 books/sp/kimrb85/FurtadoC85 ... journals/jlp/Fitting85a journals/tcs/FischerJT83 journals/acr/FaginKUV86 conf/icdt/FernandezM92 journals/tods/FaginMU82 conf/vldb/FaloutsosNS91 ... journals/ai/Forgy82 ... conf/sigmod/Freytag87 ... journals/siamcomp/FischerT83 journals/siamcomp/FaginMUY83 conf/pods/FaginUV83 conf/icalp/FaginV84 ... ... ... ... conf/sigmod/GraefeD87 conf/ride/GatziuD94 conf/sigmod/GardarinM86 conf/sigmod/GyssensG88 journals/tcs/GinsburgH83a journals/jacm/GinsburgH86 ... books/bc/tanselCGSS93/Ginsburg93 books/fm/GareyJ79 journals/jacm/GrantJ82 conf/vldb/GehaniJ91 conf/vldb/GhandeharizadehHJCELLTZ93 journals/tods/GhandeharizadehHJ96 conf/vldb/GehaniJS92 ... conf/sigmod/GehaniJS92 ... conf/deductive/GuptaKM92 conf/pods/GurevichL82 conf/iclp/GelfondL88 conf/adbt/77 journals/csur/GallaireMN84 conf/pods/GrahneMR92 conf/sigmod/GuptaMS93 conf/lics/GaifmanMSV87 journals/jacm/GaifmanMSV93 journals/jacm/GrahamMV86 conf/csl/GradelO92 ... conf/pods/Gottlob87 conf/pods/GyssensPG90 conf/dood/GiannottiPSZ91 books/aw/GoldbergR83 journals/acr/GrahneR86 journals/ipl/Grant77 ... journals/iandc/Grandjean83 conf/vldb/Grahne84 ... 
journals/csur/Graefe93 books/sp/Greibach75 journals/tods/GoodmanS82 journals/jcss/GoodmanS84 conf/focs/GurevichS85 ... conf/pods/GrumbachS94 conf/sigmod/GangulyST90 ... journals/tcs/Gunter92 ... ... ... ... conf/pods/GrahamV84 conf/pods/GrumbachV91 conf/icde/GardarinV92 conf/sigmod/GraefeW89 ... journals/jacm/GinsburgZ82 conf/vldb/GottlobZ88 ... ... journals/sigmod/Hanson89 ... journals/cacm/Harel80 journals/tkde/HaasCLMWLLPCS90 conf/lics/Hella92 journals/iandc/Herrmann95 conf/pods/HirstH93 conf/vldb/HullJ91 conf/ewdw/HullJ90 journals/csur/HullK87 journals/tods/HudsonK89 conf/lics/HillebrandKM93 conf/nato/HillebrandKR93 conf/jcdkb/HsuLM88 journals/ipl/HoneymanLY80 journals/tods/HammerM81 conf/adbt/HenschenMN82 ... journals/jacm/HenschenN84 journals/jacm/Honeyman82 conf/sigmod/HullS89 conf/pods/HullS89 journals/acta/HullS94 journals/jcss/HullS93 conf/fodo/HullTY89 journals/jcss/Hull83 journals/jacm/Hull84 journals/tcs/Hull85 journals/siamcomp/Hull86 ... conf/vldb/Hulin89 ... journals/jacm/HullY84 conf/vldb/HullY90 conf/pods/HullY91 conf/sigmod/IoannidisK90 journals/jcss/ImielinskiL84 conf/adbt/Imielinski82 journals/jcss/Immerman82 journals/iandc/Immerman86 ... journals/siamcomp/Immerman87 conf/pods/ImielinskiN88 conf/vldb/IoannidisNSS92 conf/sigmod/ImielinskiNV91 conf/dood/ImielinskiNV91 conf/vldb/Ioannidis85 journals/jacm/Jacobs82 conf/dbpl/JacobsH91 journals/csur/JarkeK84 journals/jcss/JohnsonK84 conf/popl/JaffarL87 books/el/leeuwen90/Johnson90 journals/jacm/Joyner76 conf/pods/JaeschkeS82 ... books/mk/minker88/Kanellakis88 books/el/leeuwen90/Kanellakis90 conf/oopsla/KhoshafianC86 conf/edbt/KotzDM88 conf/jcdkb/Keller82 conf/pods/Keller85 journals/computer/Keller86 ... journals/tods/Kent79 ... journals/ngc/RohmerLK86 conf/tacs/KanellakisG94 conf/jcdkb/Kifer88 conf/pods/KanellakisKR90 conf/sigmod/KiferKS92 ... conf/icdt/KiferL86 books/aw/KimL89 ... journals/tods/Klug80 journals/jacm/Klug82 journals/jacm/Klug88 journals/jacm/KiferLW95 conf/kr/KatsunoM91 journals/ai/KatsunoM92 conf/jcdkb/KrishnamurthyN88 journals/csur/Knight89 ... journals/iandc/Kolaitis91 journals/ai/Konolige88 conf/ifip/Kowalski74 journals/jacm/Kowalski75 conf/bncod/Kowalski84 conf/vldb/KoenigP81 journals/tods/KlugP82 ... conf/pods/KolaitisP88 conf/pods/KiferRS88 conf/sigmod/KrishnamurthyRS88 books/mg/SilberschatzK91 conf/iclp/KempT88 conf/sigmod/KellerU84 conf/dood/Kuchenhoff91 ... journals/jlp/Kunen87 conf/iclp/Kunen88 conf/pods/Kuper87 conf/pods/Kuper88 conf/ppcp/Kuper93 conf/pods/KuperV84 conf/stoc/KolaitisV87 journals/tcs/KarabegV90 journals/iandc/KolaitisV90 conf/pods/KolaitisV90 journals/tods/KarabegV91 journals/iandc/KolaitisV92 journals/tcs/KuperV93 journals/tods/KuperV93 journals/tse/KellerW85 conf/pods/KiferW89 conf/jcdkb/Lang88 books/el/Leeuwen90 ... journals/jcss/Leivant89 ... journals/iandc/Leivant90 ... conf/db-workshops/Levesque82 journals/ai/Levesque84 conf/mfdbs/Libkin91 conf/er/Lien79 journals/jacm/Lien82 books/mk/minker88/Lifschitz88 ... journals/tcs/Lindell91 journals/tods/Lipski79 journals/jacm/Lipski81 journals/tcs/LeratL86 journals/cj/LeveneL90 books/sp/Lloyd87 conf/pods/LakshmananM89 conf/tlca/LeivantM93 conf/sigmod/LaverMG83 conf/pods/LiptonN90 journals/jcss/LucchesiO78 conf/sigmod/Lohman88 ... conf/ijcai/Lozinskii85 books/ph/LewisP81 ... conf/sigmod/LecluseRV88 journals/is/LipeckS87 journals/jlp/LloydST87 journals/tods/LingTK81 conf/sigmod/LyngbaekV87 conf/dood/LefebvreV89 conf/pods/LibkinW93 conf/dbpl/LibkinW93 journals/jacm/Maier80 books/cs/Maier83 ... 
conf/vldb/Makinouchi77 conf/icalp/Makowsky81 ... conf/icdt/Malvestuto86 conf/aaai/MacGregorB92 journals/tods/MylopoulosBW80 conf/sigmod/McCarthyD89 journals/csur/MishraE92 conf/sigmod/MumickFPR90 books/mk/Minker88 journals/jlp/Minker88 conf/vldb/MillerIR93 journals/is/MillerIR94 journals/iandc/Mitchell83 conf/pods/Mitchell83 conf/vldb/MendelzonM79 journals/tods/MaierMS79 journals/jcss/MaierMSU80 conf/pods/MendelzonMW94 journals/debu/MorrisNSUG87 journals/ai/Moore85 conf/vldb/Morgenstern83 conf/pods/Morris88 ... conf/pods/MannilaR85 ... journals/jlp/MinkerR90 books/aw/MannilaR92 journals/acr/MaierRW86 ... journals/tods/MarkowitzS92 conf/pods/Marchetti-SpaccamelaPS87 journals/jacm/MaierSY81 conf/iclp/MorrisUG86 journals/tods/MaierUV84 conf/iclp/MorrisUG86 journals/acta/MakowskyV86 books/bc/MaierW88 books/mk/minker88/ManchandraW88 conf/pods/Naughton86 conf/sigmod/NgFS91 ... conf/vldb/Nejdl87 conf/adbt/NicolasM77 conf/sigmod/Nicolas78 journals/acta/Nicolas82 conf/ds/76 conf/pods/NaqviK88 journals/tods/NegriPS91 conf/vldb/NaughtonRSU89 conf/pods/NaughtonS87 ... ... conf/vldb/Osborn79 ... journals/tods/OzsoyogluY87 conf/adbt/Paige82 ... books/cs/Papadimitriou86 ... journals/ipl/Paredaens78 ... books/sp/ParedaensBGG89 journals/ai/Andersen91 books/el/leeuwen90/Perrin90 journals/ins/Petrov89 conf/pods/ParedaensG88 conf/pods/PatnaikI94 conf/adbt/ParedaensJ79 journals/csur/PeckhamM88 ... ... conf/sigmod/ParkerP80 ... conf/iclp/Przymusinski88 conf/pods/Przymusinski89 ... conf/vldb/ParkerSV92 conf/aaai/PearlV87 journals/ai/PereiraW80a conf/pods/PapadimitriouY92 journals/tkde/QianW91 ... journals/jlp/Ramakrishnan91 conf/pods/RamakrishnanBS87 ... conf/adbt/Reiter77 journals/ai/Reiter80 conf/db-workshops/Reiter82 journals/jacm/Reiter86 journals/tods/Rissanen77 conf/mfcs/Rissanen78 conf/pods/Rissanen82 ... journals/ngc/RohmerLK86 journals/jacm/Robinson65 ... conf/pods/Ross89 ... ... conf/sigmod/RoweS79 conf/sigmod/RichardsonS91 journals/debu/RamamohanaraoSBPNTZD87 conf/vldb/RamakrishnanSS92 conf/sigmod/RamakrishnanSSS93 conf/pods/RamakrishnanSUV89 journals/jcss/RamakrishnanSUV93 journals/jlp/RamakrishnanU95 conf/sigmod/SelingerACLP79 conf/sigmod/Sagiv81 journals/tods/Sagiv83 books/mk/minker88/Sagiv88 conf/slp/Sagiv90 conf/sigmod/Sciore81 journals/jacm/Sciore82 conf/pods/Sciore83 journals/acr/Sciore86 journals/jacm/SagivDPF81 conf/pods/X89 ... journals/ai/SmithG85 books/mk/minker88/Shepherdson88 journals/tods/Shipman81 conf/pods/Shmueli87 conf/iclp/SekiI88 conf/sigmod/ShmueliI84 journals/tc/Sickel76 journals/jsc/Siekmann89 conf/sigmod/StonebrakerJGP90 conf/vldb/SimonKM92 journals/csur/ShethL90 conf/pods/SeibL91 conf/sigmod/SuLRD93 conf/adbt/SilvaM79 journals/sigmod/Snodgrass90 journals/sigmod/Soo91 conf/pods/SuciuP94 conf/sigmod/StonebrakerR86 conf/slp/SudarshanR93 conf/pods/SagivS86 journals/cacm/Stonebraker81 books/mk/Stonebraker88 journals/tkde/Stonebraker92 books/aw/Stroustrup91 journals/jacm/SadriU82 conf/vldb/Su91 conf/pods/SagivV89 journals/jacm/SagivW82 journals/tods/StonebrakerWKH76 journals/jacm/SagivY80 conf/pods/SaccaZ86 journals/tcs/SaccaZ88 ... conf/pods/SaccaZ90 ... ... books/bc/TanselCGJSS93 ... journals/acr/ThomasF86 ... ... ... ... journals/tcs/Topor87 ... books/mk/minker88/ToporS88 ... journals/siamcomp/TarjanY84 journals/csur/TeoreyYF86 journals/algorithmica/UllmanG88 conf/pods/Ullman82 books/cs/Ullman82 journals/tods/Ullman85 books/cs/Ullman88 conf/pods/Ullman89 books/cs/Ullman89 conf/sigmod/Gelder86 ... 
conf/pods/BusscheG92 conf/focs/BusscheGAG92 conf/pods/BusscheP91 conf/slp/Gelder86 conf/pods/Gelder89 conf/pods/GelderRS88 journals/jacm/GelderRS91 journals/tods/GelderT91 journals/ipl/Vardi81 conf/stoc/Vardi82 conf/focs/Vardi82 journals/acta/Vardi83 journals/jcss/Vardi84 conf/pods/Vardi85 conf/pods/Vardi86 journals/jcss/Vardi86 ... conf/pods/Vardi88 conf/sigmod/Vassiliou79 ... ... journals/jacm/EmdenK76 conf/nf2/SchollABBGPRV87 journals/jacm/Vianu87 journals/acta/Vianu87 conf/eds/Vieille86 conf/iclp/Vieille87 ... conf/eds/Vieille88 journals/tcs/Vieille89 ... journals/tcs/VianuV92 conf/sigmod/WidomF90 conf/icde/WangH92 conf/pos/WidjojoHW90 journals/computer/Wiederhold92 conf/pods/Wilkins86 conf/pods/Winslett88 conf/sigmod/WolfsonO90 conf/pods/Wong93 conf/sigmod/WolfsonS88 journals/ibmrd/WangW75 journals/tods/WongY76 conf/vldb/Yannakakis81 journals/csur/YuC84 ... journals/jcss/YannakakisP82 ... journals/tods/Zaniolo82 journals/jcss/Zaniolo84 ... conf/edbt/ZhouH90 journals/ibmsj/Zloof77 books/mk/ZdonikM90 db/books/dbtext/abiteboul95.html" }
+{ "id": 72, "dblpid": "books/aw/Lamport86", "title": "LaTeX User's Guide & Reference Manual", "authors": "Leslie Lamport", "misc": "2002-01-03 Addison-Wesley 1986 0-201-15790-X" }
+{ "id": 73, "dblpid": "books/aw/AhoHU74", "title": "The Design and Analysis of Computer Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1974 0-201-00029-6" }
+{ "id": 74, "dblpid": "books/aw/Lamport2002", "title": "Specifying Systems, The TLA+ Language and Tools for Hardware and Software Engineers", "authors": "Leslie Lamport", "misc": "2005-07-28 Addison-Wesley 2002 0-3211-4306-X http //research.microsoft.com/users/lamport/tla/book.html" }
+{ "id": 75, "dblpid": "books/aw/AhoHU83", "title": "Data Structures and Algorithms.", "authors": "Alfred V. Aho John E. Hopcroft Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1983 0-201-00023-7" }
+{ "id": 76, "dblpid": "books/aw/LewisBK01", "title": "Databases and Transaction Processing An Application-Oriented Approach", "authors": "Philip M. Lewis Arthur J. Bernstein Michael Kifer", "misc": "2002-01-03 Addison-Wesley 2001 0-201-70872-8" }
+{ "id": 77, "dblpid": "books/aw/AhoKW88", "title": "The AWK Programming Language", "authors": "Alfred V. Aho Brian W. Kernighan Peter J. Weinberger", "misc": "2002-01-03 Addison-Wesley 1988" }
+{ "id": 78, "dblpid": "books/aw/LindholmY97", "title": "The Java Virtual Machine Specification", "authors": "Tim Lindholm Frank Yellin", "misc": "2002-01-28 Addison-Wesley 1997 0-201-63452-X" }
+{ "id": 79, "dblpid": "books/aw/AhoSU86", "title": "Compilers Princiles, Techniques, and Tools.", "authors": "Alfred V. Aho Ravi Sethi Jeffrey D. Ullman", "misc": "2002-01-03 Addison-Wesley 1986 0-201-10088-6" }
+{ "id": 80, "dblpid": "books/aw/Sedgewick83", "title": "Algorithms", "authors": "Robert Sedgewick", "misc": "2002-01-03 Addison-Wesley 1983 0-201-06672-6" }
+{ "id": 81, "dblpid": "journals/siamcomp/AspnesW96", "title": "Randomized Consensus in Expected O(n log² n) Operations Per Processor.", "authors": "James Aspnes Orli Waarts", "misc": "2002-01-03 1024-1044 1996 25 SIAM J. Comput. 5 db/journals/siamcomp/siamcomp25.html#AspnesW96" }
+{ "id": 82, "dblpid": "conf/focs/AspnesW92", "title": "Randomized Consensus in Expected O(n log ^2 n) Operations Per Processor", "authors": "James Aspnes Orli Waarts", "misc": "2006-04-25 137-146 conf/focs/FOCS33 1992 FOCS db/conf/focs/focs92.html#AspnesW92" }
+{ "id": 83, "dblpid": "journals/siamcomp/Bloniarz83", "title": "A Shortest-Path Algorithm with Expected Time O(n² log n log* n).", "authors": "Peter A. Bloniarz", "misc": "2002-01-03 588-600 1983 12 SIAM J. Comput. 3 db/journals/siamcomp/siamcomp12.html#Bloniarz83" }
+{ "id": 84, "dblpid": "conf/stoc/Bloniarz80", "title": "A Shortest-Path Algorithm with Expected Time O(n^2 log n log ^* n)", "authors": "Peter A. Bloniarz", "misc": "2006-04-25 378-384 conf/stoc/STOC12 1980 STOC db/conf/stoc/stoc80.html#Bloniarz80" }
+{ "id": 85, "dblpid": "journals/siamcomp/Megiddo83a", "title": "Linear-Time Algorithms for Linear Programming in R³ and Related Problems.", "authors": "Nimrod Megiddo", "misc": "2002-01-03 759-776 1983 12 SIAM J. Comput. 4 db/journals/siamcomp/siamcomp12.html#Megiddo83a" }
+{ "id": 86, "dblpid": "conf/focs/Megiddo82", "title": "Linear-Time Algorithms for Linear Programming in R^3 and Related Problems", "authors": "Nimrod Megiddo", "misc": "2006-04-25 329-338 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#Megiddo82" }
+{ "id": 87, "dblpid": "journals/siamcomp/MoffatT87", "title": "An All Pairs Shortest Path Algorithm with Expected Time O(n² log n).", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2002-01-03 1023-1031 1987 16 SIAM J. Comput. 6 db/journals/siamcomp/siamcomp16.html#MoffatT87" }
+{ "id": 88, "dblpid": "conf/focs/MoffatT85", "title": "An All Pairs Shortest Path Algorithm with Expected Running Time O(n^2 log n)", "authors": "Alistair Moffat Tadao Takaoka", "misc": "2006-04-25 101-105 conf/focs/FOCS26 1985 FOCS db/conf/focs/focs85.html#MoffatT85" }
+{ "id": 89, "dblpid": "conf/icip/SchonfeldL98", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-11-05 123-127 1998 ICIP (3) db/conf/icip/icip1998-3.html#SchonfeldL98" }
+{ "id": 90, "dblpid": "conf/hicss/SchonfeldL99", "title": "VORTEX Video Retrieval and Tracking from Compressed Multimedia Databases ¾ Visual Search Engine.", "authors": "Dan Schonfeld Dan Lelescu", "misc": "2002-01-03 1999 HICSS http //computer.org/proceedings/hicss/0001/00013/00013006abs.htm db/conf/hicss/hicss1999-3.html#SchonfeldL99" }
+{ "id": 91, "dblpid": "journals/corr/abs-0802-2861", "title": "Geometric Set Cover and Hitting Sets for Polytopes in $R^3$", "authors": "Sören Laue", "misc": "2008-03-03 http //arxiv.org/abs/0802.2861 2008 CoRR abs/0802.2861 db/journals/corr/corr0802.html#abs-0802-2861 informal publication" }
+{ "id": 92, "dblpid": "conf/stacs/Laue08", "title": "Geometric Set Cover and Hitting Sets for Polytopes in R³.", "authors": "Sören Laue", "misc": "2008-03-04 2008 STACS 479-490 http //drops.dagstuhl.de/opus/volltexte/2008/1367 conf/stacs/2008 db/conf/stacs/stacs2008.html#Laue08" }
+{ "id": 93, "dblpid": "journals/iandc/IbarraJCR91", "title": "Some Classes of Languages in NC¹", "authors": "Oscar H. Ibarra Tao Jiang Jik H. Chang Bala Ravikumar", "misc": "2006-04-25 86-106 Inf. Comput. January 1991 90 1 db/journals/iandc/iandc90.html#IbarraJCR91" }
+{ "id": 94, "dblpid": "conf/awoc/IbarraJRC88", "title": "On Some Languages in NC.", "authors": "Oscar H. Ibarra Tao Jiang Bala Ravikumar Jik H. Chang", "misc": "2002-08-06 64-73 1988 conf/awoc/1988 AWOC db/conf/awoc/awoc88.html#IbarraJRC88" }
+{ "id": 95, "dblpid": "journals/jacm/GalilHLSW87", "title": "An O(n³log n) deterministic and an O(n³) Las Vegs isomorphism test for trivalent graphs.", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2003-11-20 513-531 1987 34 J. ACM 3 http //doi.acm.org/10.1145/28869.28870 db/journals/jacm/jacm34.html#GalilHLSW87" }
+{ "id": 96, "dblpid": "conf/focs/GalilHLSW82", "title": "An O(n^3 log n) Deterministic and an O(n^3) Probabilistic Isomorphism Test for Trivalent Graphs", "authors": "Zvi Galil Christoph M. Hoffmann Eugene M. Luks Claus-Peter Schnorr Andreas Weber", "misc": "2006-04-25 118-125 conf/focs/FOCS23 1982 FOCS db/conf/focs/focs82.html#GalilHLSW82" }
+{ "id": 97, "dblpid": "journals/jacm/GalilT88", "title": "An O(n²(m + n log n)log n) min-cost flow algorithm.", "authors": "Zvi Galil Éva Tardos", "misc": "2003-11-20 374-386 1988 35 J. ACM 2 http //doi.acm.org/10.1145/42282.214090 db/journals/jacm/jacm35.html#GalilT88" }
+{ "id": 98, "dblpid": "conf/focs/GalilT86", "title": "An O(n^2 (m + n log n) log n) Min-Cost Flow Algorithm", "authors": "Zvi Galil Éva Tardos", "misc": "2006-04-25 1-9 conf/focs/FOCS27 1986 FOCS db/conf/focs/focs86.html#GalilT86" }
+{ "id": 99, "dblpid": "series/synthesis/2009Weintraub", "title": "Jordan Canonical Form Theory and Practice", "authors": "Steven H. Weintraub", "misc": "2009-09-06 Jordan Canonical Form Theory and Practice http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 http //dx.doi.org/10.2200/S00218ED1V01Y200908MAS006 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
+{ "id": 100, "dblpid": "series/synthesis/2009Brozos", "title": "The Geometry of Walker Manifolds", "authors": "Miguel Brozos-Vázquez Eduardo García-Río Peter Gilkey Stana Nikcevic Rámon Vázquez-Lorenzo", "misc": "2009-09-06 The Geometry of Walker Manifolds http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 http //dx.doi.org/10.2200/S00197ED1V01Y200906MAS005 2009 Synthesis Lectures on Mathematics & Statistics Morgan & Claypool Publishers" }
diff --git a/asterix-app/src/test/resources/runtimets/results/tpch/q10_returned_item_int64.adm b/asterix-app/src/test/resources/runtimets/results/tpch/q10_returned_item_int64.adm
new file mode 100644
index 0000000..422b480
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/tpch/q10_returned_item_int64.adm
@@ -0,0 +1,20 @@
+{ "c_custkey": 121i64, "c_name": "Customer#000000121", "revenue": 282635.1719d, "c_acctbal": 6428.32d, "n_name": "PERU", "c_address": "tv nCR2YKupGN73mQudO", "c_phone": "27-411-990-2959", "c_comment": "uriously stealthy ideas. carefully final courts use carefully" }
+{ "c_custkey": 124i64, "c_name": "Customer#000000124", "revenue": 222182.5188d, "c_acctbal": 1842.49d, "n_name": "CHINA", "c_address": "aTbyVAW5tCd,v09O", "c_phone": "28-183-750-7809", "c_comment": "le fluffily even dependencies. quietly s" }
+{ "c_custkey": 106i64, "c_name": "Customer#000000106", "revenue": 190241.3334d, "c_acctbal": 3288.42d, "n_name": "ARGENTINA", "c_address": "xGCOEAUjUNG", "c_phone": "11-751-989-4627", "c_comment": "lose slyly. ironic accounts along the evenly regular theodolites wake about the special, final gifts. " }
+{ "c_custkey": 16i64, "c_name": "Customer#000000016", "revenue": 161422.04609999998d, "c_acctbal": 4681.03d, "n_name": "IRAN", "c_address": "cYiaeMLZSMAOQ2 d0W,", "c_phone": "20-781-609-3107", "c_comment": "kly silent courts. thinly regular theodolites sleep fluffily after " }
+{ "c_custkey": 44i64, "c_name": "Customer#000000044", "revenue": 149364.5652d, "c_acctbal": 7315.94d, "n_name": "MOZAMBIQUE", "c_address": "Oi,dOSPwDu4jo4x,,P85E0dmhZGvNtBwi", "c_phone": "26-190-260-5375", "c_comment": "r requests around the unusual, bold a" }
+{ "c_custkey": 71i64, "c_name": "Customer#000000071", "revenue": 129481.02450000001d, "c_acctbal": -611.19d, "n_name": "GERMANY", "c_address": "TlGalgdXWBmMV,6agLyWYDyIz9MKzcY8gl,w6t1B", "c_phone": "17-710-812-5403", "c_comment": "g courts across the regular, final pinto beans are blithely pending ac" }
+{ "c_custkey": 89i64, "c_name": "Customer#000000089", "revenue": 121663.1243d, "c_acctbal": 1530.76d, "n_name": "KENYA", "c_address": "dtR, y9JQWUO6FoJExyp8whOU", "c_phone": "24-394-451-5404", "c_comment": "counts are slyly beyond the slyly final accounts. quickly final ideas wake. r" }
+{ "c_custkey": 112i64, "c_name": "Customer#000000112", "revenue": 111137.7141d, "c_acctbal": 2953.35d, "n_name": "ROMANIA", "c_address": "RcfgG3bO7QeCnfjqJT1", "c_phone": "29-233-262-8382", "c_comment": "rmanently unusual multipliers. blithely ruthless deposits are furiously along the" }
+{ "c_custkey": 62i64, "c_name": "Customer#000000062", "revenue": 106368.0153d, "c_acctbal": 595.61d, "n_name": "GERMANY", "c_address": "upJK2Dnw13,", "c_phone": "17-361-978-7059", "c_comment": "kly special dolphins. pinto beans are slyly. quickly regular accounts are furiously a" }
+{ "c_custkey": 146i64, "c_name": "Customer#000000146", "revenue": 103265.98879999999d, "c_acctbal": 3328.68d, "n_name": "CANADA", "c_address": "GdxkdXG9u7iyI1,,y5tq4ZyrcEy", "c_phone": "13-835-723-3223", "c_comment": "ffily regular dinos are slyly unusual requests. slyly specia" }
+{ "c_custkey": 19i64, "c_name": "Customer#000000019", "revenue": 99306.0127d, "c_acctbal": 8914.71d, "n_name": "CHINA", "c_address": "uc,3bHIx84H,wdrmLOjVsiqXCq2tr", "c_phone": "28-396-526-5053", "c_comment": " nag. furiously careful packages are slyly at the accounts. furiously regular in" }
+{ "c_custkey": 145i64, "c_name": "Customer#000000145", "revenue": 99256.9018d, "c_acctbal": 9748.93d, "n_name": "JORDAN", "c_address": "kQjHmt2kcec cy3hfMh969u", "c_phone": "23-562-444-8454", "c_comment": "ests? express, express instructions use. blithely fina" }
+{ "c_custkey": 103i64, "c_name": "Customer#000000103", "revenue": 97311.77240000002d, "c_acctbal": 2757.45d, "n_name": "INDONESIA", "c_address": "8KIsQX4LJ7QMsj6DrtFtXu0nUEdV,8a", "c_phone": "19-216-107-2107", "c_comment": "furiously pending notornis boost slyly around the blithely ironic ideas? final, even instructions cajole fl" }
+{ "c_custkey": 136i64, "c_name": "Customer#000000136", "revenue": 95855.39799999999d, "c_acctbal": -842.39d, "n_name": "GERMANY", "c_address": "QoLsJ0v5C1IQbh,DS1", "c_phone": "17-501-210-4726", "c_comment": "ackages sleep ironic, final courts. even requests above the blithely bold requests g" }
+{ "c_custkey": 53i64, "c_name": "Customer#000000053", "revenue": 92568.9124d, "c_acctbal": 4113.64d, "n_name": "MOROCCO", "c_address": "HnaxHzTfFTZs8MuCpJyTbZ47Cm4wFOOgib", "c_phone": "25-168-852-5363", "c_comment": "ar accounts are. even foxes are blithely. fluffily pending deposits boost" }
+{ "c_custkey": 49i64, "c_name": "Customer#000000049", "revenue": 90965.7262d, "c_acctbal": 4573.94d, "n_name": "IRAN", "c_address": "cNgAeX7Fqrdf7HQN9EwjUa4nxT,68L FKAxzl", "c_phone": "20-908-631-4424", "c_comment": "nusual foxes! fluffily pending packages maintain to the regular " }
+{ "c_custkey": 37i64, "c_name": "Customer#000000037", "revenue": 88065.74579999999d, "c_acctbal": -917.75d, "n_name": "INDIA", "c_address": "7EV4Pwh,3SboctTWt", "c_phone": "18-385-235-7162", "c_comment": "ilent packages are carefully among the deposits. furiousl" }
+{ "c_custkey": 82i64, "c_name": "Customer#000000082", "revenue": 86998.9644d, "c_acctbal": 9468.34d, "n_name": "CHINA", "c_address": "zhG3EZbap4c992Gj3bK,3Ne,Xn", "c_phone": "28-159-442-5305", "c_comment": "s wake. bravely regular accounts are furiously. regula" }
+{ "c_custkey": 125i64, "c_name": "Customer#000000125", "revenue": 84808.068d, "c_acctbal": -234.12d, "n_name": "ROMANIA", "c_address": ",wSZXdVR xxIIfm9s8ITyLl3kgjT6UC07GY0Y", "c_phone": "29-261-996-3120", "c_comment": "x-ray finally after the packages? regular requests c" }
+{ "c_custkey": 59i64, "c_name": "Customer#000000059", "revenue": 84655.5711d, "c_acctbal": 3458.6d, "n_name": "ARGENTINA", "c_address": "zLOCP0wh92OtBihgspOGl4", "c_phone": "11-355-584-3112", "c_comment": "ously final packages haggle blithely after the express deposits. furiou" }
diff --git a/asterix-app/src/test/resources/runtimets/results/user-defined-functions/query-issue201.adm b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/query-issue201.adm
new file mode 100644
index 0000000..190423f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/user-defined-functions/query-issue201.adm
@@ -0,0 +1,100 @@
+1
+2
+3
+4
+5
+6
+7
+8
+9
+10
+11
+12
+13
+14
+15
+16
+17
+18
+19
+20
+21
+22
+23
+24
+25
+26
+27
+28
+29
+30
+31
+32
+33
+34
+35
+36
+37
+38
+39
+40
+41
+42
+43
+44
+45
+46
+47
+48
+49
+50
+51
+52
+53
+54
+55
+56
+57
+58
+59
+60
+61
+62
+63
+64
+65
+66
+67
+68
+69
+70
+71
+72
+73
+74
+75
+76
+77
+78
+79
+80
+81
+82
+83
+84
+85
+86
+87
+88
+89
+90
+91
+92
+93
+94
+95
+96
+97
+98
+99
+100
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index 145ef62..9ae7c60 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -739,6 +739,11 @@
</test-group>
<test-group name="dml">
<test-case FilePath="dml">
+ <compilation-unit name="query-issue205">
+ <output-file compare="Text">query-issue205.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="dml">
<compilation-unit name="delete-from-loaded-dataset-with-index">
<output-file compare="Text">delete-from-loaded-dataset-with-index.adm</output-file>
</compilation-unit>
@@ -1474,6 +1479,12 @@
</compilation-unit>
</test-case>
<test-case FilePath="fuzzyjoin">
+ <compilation-unit name="dblp-aqlplus_2">
+ <output-file compare="Text">dblp-aqlplus_2.adm</output-file>
+ <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="fuzzyjoin">
<compilation-unit name="dblp-csx-2_1">
<output-file compare="Text">dblp-csx-2_1.adm</output-file>
</compilation-unit>
@@ -2363,6 +2374,36 @@
<output-file compare="Text">open-closed-14.adm</output-file>
</compilation-unit>
</test-case>
+ <test-case FilePath="open-closed">
+ <compilation-unit name="query-issue134">
+ <output-file compare="Text">query-issue134.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="open-closed">
+ <compilation-unit name="query-issue55">
+ <output-file compare="Text">query-issue55.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="open-closed">
+ <compilation-unit name="query-issue55-1">
+ <output-file compare="Text">query-issue55-1.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="open-closed">
+ <compilation-unit name="query-issue166">
+ <output-file compare="Text">query-issue166.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="open-closed">
+ <compilation-unit name="query-issue208">
+ <output-file compare="Text">query-issue208.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="open-closed">
+ <compilation-unit name="query-issue236">
+ <output-file compare="Text">query-issue236.adm</output-file>
+ </compilation-unit>
+ </test-case>
<!--
<test-case FilePath="open-closed">
<compilation-unit name="open-closed-15">
@@ -2561,6 +2602,11 @@
</compilation-unit>
</test-case>
<test-case FilePath="records">
+ <compilation-unit name="field-access-on-open-field">
+ <output-file compare="Text">field-access-on-open-field.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="records">
<compilation-unit name="open-record-constructor_01">
<output-file compare="Text">open-record-constructor_01.adm</output-file>
</compilation-unit>
@@ -2583,6 +2629,16 @@
</compilation-unit>
</test-case>
<test-case FilePath="scan">
+ <compilation-unit name="issue238_query_1">
+ <output-file compare="Text">issue238_query_1.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="scan">
+ <compilation-unit name="issue238_query_2">
+ <output-file compare="Text">issue238_query_2.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="scan">
<compilation-unit name="30">
<output-file compare="Text">30.adm</output-file>
</compilation-unit>
@@ -3402,6 +3458,11 @@
</compilation-unit>
</test-case>
<test-case FilePath="tpch">
+ <compilation-unit name="q10_returned_item_int64">
+ <output-file compare="Text">q10_returned_item_int64.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="tpch">
<compilation-unit name="q11_important_stock">
<output-file compare="Text">q11_important_stock.adm</output-file>
</compilation-unit>
@@ -3621,6 +3682,11 @@
</test-group>
<test-group name="user-defined-functions">
<test-case FilePath="user-defined-functions">
+ <compilation-unit name="query-issue201">
+ <output-file compare="Text">query-issue201.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="user-defined-functions">
<compilation-unit name="udf01">
<output-file compare="Text">udf01.adm</output-file>
</compilation-unit>
@@ -3771,4 +3837,56 @@
</compilation-unit>
</test-case>
</test-group>
-</test-suite>
\ No newline at end of file
+ <test-group name="load">
+ <test-case FilePath="load">
+ <compilation-unit name="issue14_query">
+ <output-file compare="Text">none.adm</output-file>
+ <expected-error>edu.uci.ics.asterix.common.exceptions.AsterixException</expected-error>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+ <test-group name="feeds">
+ <test-case FilePath="feeds">
+ <compilation-unit name="feeds_01">
+ <output-file compare="Text">feeds_01.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="feeds">
+ <compilation-unit name="feeds_02">
+ <output-file compare="Text">feeds_02.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="feeds">
+ <compilation-unit name="feeds_03">
+ <output-file compare="Text">feeds_03.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="feeds">
+ <compilation-unit name="feeds_04">
+ <output-file compare="Text">feeds_04.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="feeds">
+ <compilation-unit name="issue_230_feeds">
+ <output-file compare="Text">issue_230_feeds.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+ <test-group name="hdfs">
+ <test-case FilePath="hdfs">
+ <compilation-unit name="issue_245_hdfs">
+ <output-file compare="Text">issue_245_hdfs.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="hdfs">
+ <compilation-unit name="hdfs_02">
+ <output-file compare="Text">hdfs_02.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ <test-case FilePath="hdfs">
+ <compilation-unit name="hdfs_03">
+ <output-file compare="Text">hdfs_03.adm</output-file>
+ </compilation-unit>
+ </test-case>
+ </test-group>
+</test-suite>
diff --git a/asterix-aql/src/main/javacc/AQL.jj b/asterix-aql/src/main/javacc/AQL.jj
index eaa4260..8672fd1 100644
--- a/asterix-aql/src/main/javacc/AQL.jj
+++ b/asterix-aql/src/main/javacc/AQL.jj
@@ -805,7 +805,7 @@
dataverse = nameComponents.first != null ? nameComponents.first.getValue() : defaultDataverse;
functionName = nameComponents.second.getValue();
}
- ("@" <IDENTIFIER>
+ ("@" <INTEGER_LITERAL>
{
arity = Integer.parseInt(token.image);
}
@@ -866,7 +866,7 @@
{
configuration = getConfiguration();
}
- ";"
+
{
return new ControlFeedStatement(ControlFeedStatement.OperationType.ALTER, nameComponents.first, nameComponents.second, configuration);
}
diff --git a/asterix-common/pom.xml b/asterix-common/pom.xml
index 048c037..ee81e75 100644
--- a/asterix-common/pom.xml
+++ b/asterix-common/pom.xml
@@ -25,13 +25,11 @@
<dependencies>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-algebricks-compiler</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <artifactId>algebricks-compiler</artifactId>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java
index 144a8824..00a2651 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/api/AsterixAppContextInfoImpl.java
@@ -1,22 +1,50 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.common.api;
-import java.util.Map;
-import java.util.Set;
-
import edu.uci.ics.asterix.common.context.AsterixIndexRegistryProvider;
import edu.uci.ics.asterix.common.context.AsterixStorageManagerInterface;
import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
+import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+/*
+ * Acts as a holder class for the IndexRegistryProvider and AsterixStorageManager
+ * instances that are accessed from the NCs. In addition, an instance of ICCApplicationContext
+ * is stored for access by the CC.
+ */
public class AsterixAppContextInfoImpl implements IAsterixApplicationContextInfo {
- public static final AsterixAppContextInfoImpl INSTANCE = new AsterixAppContextInfoImpl();
+ private static AsterixAppContextInfoImpl INSTANCE;
- private static Map<String, Set<String>> nodeControllerMap;
+ private final ICCApplicationContext appCtx;
- private AsterixAppContextInfoImpl() {
+ public static void initialize(ICCApplicationContext ccAppCtx) {
+ if (INSTANCE == null) {
+ INSTANCE = new AsterixAppContextInfoImpl(ccAppCtx);
+ }
+ }
+
+ private AsterixAppContextInfoImpl(ICCApplicationContext ccAppCtx) {
+ this.appCtx = ccAppCtx;
+ }
+
+ public static IAsterixApplicationContextInfo getInstance() {
+ return INSTANCE;
}
@Override
@@ -29,12 +57,9 @@
return AsterixStorageManagerInterface.INSTANCE;
}
- public static void setNodeControllerInfo(Map<String, Set<String>> nodeControllerInfo) {
- nodeControllerMap = nodeControllerInfo;
- }
-
- public static Map<String, Set<String>> getNodeControllerMap() {
- return nodeControllerMap;
+ @Override
+ public ICCApplicationContext getCCApplicationContext() {
+ return appCtx;
}
}
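
The hunk above replaces the eagerly constructed singleton with an initialize-once holder that is handed the ICCApplicationContext during startup and exposed through getInstance(). A minimal, self-contained sketch of that pattern follows; the class and field names are illustrative stand-ins, not part of this patch.

    // Sketch of an initialize-once holder; all names here are hypothetical stand-ins.
    public final class ContextHolderSketch {
        private static ContextHolderSketch INSTANCE;

        private final Object appCtx; // stand-in for ICCApplicationContext

        private ContextHolderSketch(Object appCtx) {
            this.appCtx = appCtx;
        }

        // Called once at startup; subsequent calls leave the existing instance in place.
        public static void initialize(Object appCtx) {
            if (INSTANCE == null) {
                INSTANCE = new ContextHolderSketch(appCtx);
            }
        }

        public static ContextHolderSketch getInstance() {
            return INSTANCE;
        }

        public Object getCCApplicationContext() {
            return appCtx;
        }
    }
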
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
index d676cb5..f84f294 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/context/AsterixAppRuntimeContext.java
@@ -12,9 +12,11 @@
import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.buffercache.DelayPageCleanerPolicy;
import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageCleanerPolicy;
import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
@@ -22,7 +24,7 @@
public class AsterixAppRuntimeContext {
private static final int DEFAULT_BUFFER_CACHE_PAGE_SIZE = 32768;
private final INCApplicationContext ncApplicationContext;
-
+
private IndexRegistry<IIndex> indexRegistry;
private IFileMapManager fileMapManager;
private IBufferCache bufferCache;
@@ -43,7 +45,8 @@
ICacheMemoryAllocator allocator = new HeapBufferAllocator();
IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
IIOManager ioMgr = ncApplicationContext.getRootContext().getIOManager();
- bufferCache = new BufferCache(ioMgr, allocator, prs, fileMapManager, pageSize, numPages, Integer.MAX_VALUE);
+ IPageCleanerPolicy pcp = new DelayPageCleanerPolicy(600000);
+ bufferCache = new BufferCache(ioMgr, allocator, prs, pcp, fileMapManager, pageSize, numPages, Integer.MAX_VALUE);
// Initialize the index registry
indexRegistry = new IndexRegistry<IIndex>();
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java
index 7bb0fd6..032e832 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/dataflow/IAsterixApplicationContextInfo.java
@@ -1,11 +1,48 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.common.dataflow;
+import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+/**
+ * Provides methods for obtaining the IIndexRegistryProvider, IStorageManager and
+ * ICCApplicationContext implementation.
+ */
public interface IAsterixApplicationContextInfo {
+
+ /**
+ * Returns an instance of the implementation for IIndexRegistryProvider.
+ *
+ * @return IIndexRegistryProvider implementation instance
+ */
public IIndexRegistryProvider<IIndex> getIndexRegistryProvider();
+ /**
+ * Returns an instance of the implementation for IStorageManagerInterface.
+ *
+ * @return IStorageManagerInterface implementation instance
+ */
public IStorageManagerInterface getStorageManagerInterface();
+
+ /**
+ * Returns an instance of the implementation for ICCApplicationContext.
+ *
+ * @return ICCApplicationContext implementation instance
+ */
+ public ICCApplicationContext getCCApplicationContext();
}
diff --git a/asterix-dist/pom.xml b/asterix-dist/pom.xml
index 53168b4..654a11e 100644
--- a/asterix-dist/pom.xml
+++ b/asterix-dist/pom.xml
@@ -1,54 +1,55 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <artifactId>asterix-dist</artifactId>
- <parent>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix</artifactId>
- <version>0.0.4-SNAPSHOT</version>
- </parent>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>asterix-dist</artifactId>
+ <parent>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix</artifactId>
+ <version>0.0.4-SNAPSHOT</version>
+ </parent>
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <version>2.2-beta-5</version>
- <executions>
- <execution>
- <configuration>
- <descriptors>
- <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
- </descriptors>
- </configuration>
- <phase>package</phase>
- <goals>
- <goal>attached</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- <dependencies>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-server</artifactId>
- <version>0.2.2-SNAPSHOT</version>
- <type>zip</type>
- <classifier>binary-assembly</classifier>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-cli</artifactId>
- <version>0.2.2-SNAPSHOT</version>
- <type>zip</type>
- <classifier>binary-assembly</classifier>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.asterix</groupId>
- <artifactId>asterix-app</artifactId>
- <version>0.0.4-SNAPSHOT</version>
- <type>zip</type>
- <classifier>binary-assembly</classifier>
- </dependency>
- </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.2-beta-5</version>
+ <executions>
+ <execution>
+ <configuration>
+ <descriptors>
+ <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+ </descriptors>
+ </configuration>
+ <phase>package</phase>
+ <goals>
+ <goal>attached</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-server</artifactId>
+ <version>0.2.3-SNAPSHOT</version>
+ <type>zip</type>
+ <classifier>binary-assembly</classifier>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-cli</artifactId>
+ <version>0.2.3-SNAPSHOT</version>
+ <type>zip</type>
+ <classifier>binary-assembly</classifier>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.asterix</groupId>
+ <artifactId>asterix-app</artifactId>
+ <version>0.0.4-SNAPSHOT</version>
+ <type>zip</type>
+ <classifier>binary-assembly</classifier>
+ </dependency>
+ </dependencies>
</project>
diff --git a/asterix-external-data/pom.xml b/asterix-external-data/pom.xml
index 7da6bd9..36e7a71 100644
--- a/asterix-external-data/pom.xml
+++ b/asterix-external-data/pom.xml
@@ -5,10 +5,7 @@
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-external-data</artifactId>
- <version>0.0.4-SNAPSHOT</version>
-
<build>
<plugins>
<plugin>
@@ -131,7 +128,6 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-hadoop</artifactId>
- <version>0.2.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>jdom</groupId>
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
index 7d5984a..f1f5884 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -5,7 +19,10 @@
import edu.uci.ics.asterix.external.dataset.adapter.CNNFeedAdapter;
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-public class CNNFeedAdapterFactory implements ITypedFeedDatasetAdapterFactory {
+/**
+ * A factory class for creating an instance of {@link CNNFeedAdapter}.
+ */
+public class CNNFeedAdapterFactory implements ITypedDatasetAdapterFactory {
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception {
@@ -19,14 +36,4 @@
return "cnn_feed";
}
- @Override
- public FeedAdapterType getFeedAdapterType() {
- return FeedAdapterType.TYPED;
- }
-
- @Override
- public AdapterType getAdapterType() {
- return AdapterType.FEED;
- }
-
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
index f0e30b69..6fcb710 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HDFSAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -6,7 +20,12 @@
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.om.types.IAType;
-public class HDFSAdapterFactory implements IExternalDatasetAdapterFactory {
+/**
+ * A factory class for creating an instance of HDFSAdapter
+ */
+public class HDFSAdapterFactory implements IGenericDatasetAdapterFactory {
+
+ public static final String HDFS_ADAPTER_NAME = "hdfs";
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception {
@@ -16,13 +35,8 @@
}
@Override
- public AdapterType getAdapterType() {
- return AdapterType.EXTERNAL_DATASET;
- }
-
- @Override
public String getName() {
- return "hdfs";
+ return HDFS_ADAPTER_NAME;
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
index b21abe6..5e28eed 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -6,7 +20,10 @@
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.om.types.IAType;
-public class HiveAdapterFactory implements IExternalDatasetAdapterFactory {
+/**
+ * A factory class for creating an instance of HiveAdapter
+ */
+public class HiveAdapterFactory implements IGenericDatasetAdapterFactory {
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType type) throws Exception {
@@ -16,11 +33,6 @@
}
@Override
- public AdapterType getAdapterType() {
- return AdapterType.EXTERNAL_DATASET;
- }
-
- @Override
public String getName() {
return "hive";
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java
index ca59da7..45fd6cf 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IAdapterFactory.java
@@ -1,13 +1,30 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
+/**
+ * Base interface for IGenericDatasetAdapterFactory and ITypedDatasetAdapterFactory.
+ * Acts as a marker interface indicating that the implementation provides functionality
+ * for creating an adapter.
+ */
public interface IAdapterFactory {
- public enum AdapterType {
- EXTERNAL_DATASET,
- FEED
- }
-
- public AdapterType getAdapterType();
-
+ /**
+ * Returns the display name corresponding to the Adapter type that is created by the factory.
+ *
+ * @return the display name
+ */
public String getName();
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IExternalDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IExternalDatasetAdapterFactory.java
deleted file mode 100644
index 22768a3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IExternalDatasetAdapterFactory.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.om.types.IAType;
-
-public interface IExternalDatasetAdapterFactory extends IAdapterFactory {
-
- public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType sourceType) throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IFeedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IFeedDatasetAdapterFactory.java
deleted file mode 100644
index a7d5998..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IFeedDatasetAdapterFactory.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-
-public interface IFeedDatasetAdapterFactory extends IAdapterFactory {
-
- public enum FeedAdapterType {
- GENERIC,
- TYPED
- }
-
- public FeedAdapterType getFeedAdapterType();
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java
new file mode 100644
index 0000000..093a3dd
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericDatasetAdapterFactory.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.adapter.factory;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.om.types.IAType;
+
+/**
+ * A base interface for an adapter factory that creates an instance of an adapter kind that
+ * is 'generic' in nature. A 'generic' adapter returns records with a configurable datatype.
+ */
+public interface IGenericDatasetAdapterFactory extends IAdapterFactory {
+
+ public static final String KEY_TYPE_NAME = "output-type-name";
+
+ /**
+ * Creates an instance of IDatasourceAdapter.
+ *
+ * @param configuration
+ * The configuration parameters for the adapter that is instantiated.
+ * The passed-in configuration is used to configure the created instance of the adapter.
+ * @param atype
+ * The type for the ADM records that are returned by the adapter.
+ * @return An instance of IDatasourceAdapter.
+ * @throws Exception
+ */
+ public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception;
+
+}
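
For illustration, a caller-side sketch of how a generic dataset adapter factory defined against this interface might be loaded and used. It assumes only members shown in this patch (createAdapter and the reflective loading pattern used elsewhere in it); the helper class name and the factory class name argument are hypothetical.

    import java.util.Map;

    import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
    import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
    import edu.uci.ics.asterix.om.types.IAType;

    public class GenericAdapterLoadSketch {
        // Reflectively instantiates the named factory and asks it for an adapter
        // configured with the given parameters and ADM output type.
        public static IDatasourceAdapter loadAdapter(String factoryClassName, Map<String, String> configuration,
                IAType outputType) throws Exception {
            IGenericDatasetAdapterFactory factory = (IGenericDatasetAdapterFactory) Class.forName(factoryClassName)
                    .newInstance();
            return factory.createAdapter(configuration, outputType);
        }
    }
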
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericFeedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericFeedDatasetAdapterFactory.java
deleted file mode 100644
index 34eeff2..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/IGenericFeedDatasetAdapterFactory.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.om.types.IAType;
-
-public interface IGenericFeedDatasetAdapterFactory extends IFeedDatasetAdapterFactory {
-
- public static final String KEY_TYPE_NAME="output-type-name";
-
- public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java
new file mode 100644
index 0000000..0f9978e
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedDatasetAdapterFactory.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.adapter.factory;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+
+/**
+ * A base interface for an adapter factory that creates an instance of an adapter kind that
+ * is 'typed' in nature. A 'typed' adapter returns records with a pre-defined datatype.
+ */
+public interface ITypedDatasetAdapterFactory extends IAdapterFactory {
+
+ /**
+ * Creates an instance of IDatasourceAdapter.
+ *
+ * @param configuration
+ * The configuration parameters for the adapter that is instantiated.
+ * The passed-in configuration is used to configure the created instance of the adapter.
+ * @return An instance of IDatasourceAdapter.
+ * @throws Exception
+ */
+ public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception;
+
+}
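
A matching caller-side sketch for the typed case: because a typed adapter's output type is pre-defined, only the configuration is passed. As above, the helper class name is hypothetical and only members shown in this patch are used.

    import java.util.Map;

    import edu.uci.ics.asterix.external.adapter.factory.ITypedDatasetAdapterFactory;
    import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;

    public class TypedAdapterLoadSketch {
        public static IDatasourceAdapter loadAdapter(String factoryClassName, Map<String, String> configuration)
                throws Exception {
            ITypedDatasetAdapterFactory factory = (ITypedDatasetAdapterFactory) Class.forName(factoryClassName)
                    .newInstance();
            return factory.createAdapter(configuration);
        }
    }
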
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedFeedDatasetAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedFeedDatasetAdapterFactory.java
deleted file mode 100644
index 84aa88d..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/ITypedFeedDatasetAdapterFactory.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package edu.uci.ics.asterix.external.adapter.factory;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-
-public interface ITypedFeedDatasetAdapterFactory extends IFeedDatasetAdapterFactory {
-
- public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
index ed43371..2040949 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/NCFileSystemAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -6,7 +20,14 @@
import edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter;
import edu.uci.ics.asterix.om.types.IAType;
-public class NCFileSystemAdapterFactory implements IExternalDatasetAdapterFactory {
+/**
+ * Factory class for creating an instance of NCFileSystemAdapter. An
+ * NCFileSystemAdapter reads external data residing on the local file system of
+ * an NC.
+ */
+public class NCFileSystemAdapterFactory implements IGenericDatasetAdapterFactory {
+
+ public static final String NC_FILE_SYSTEM_ADAPTER_NAME = "localfs";
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType atype) throws Exception {
@@ -16,12 +37,7 @@
}
@Override
- public AdapterType getAdapterType() {
- return AdapterType.EXTERNAL_DATASET;
- }
-
- @Override
public String getName() {
- return "localfs";
+ return NC_FILE_SYSTEM_ADAPTER_NAME;
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
index 46a8004..bc00469 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -5,7 +19,14 @@
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.external.dataset.adapter.PullBasedTwitterAdapter;
-public class PullBasedTwitterAdapterFactory implements ITypedFeedDatasetAdapterFactory {
+/**
+ * Factory class for creating an instance of PullBasedTwitterAdapter.
+ * This adapter provides the functionality of fetching tweets from the Twitter service
+ * via the pull-based Twitter API.
+ */
+public class PullBasedTwitterAdapterFactory implements ITypedDatasetAdapterFactory {
+
+ public static final String PULL_BASED_TWITTER_ADAPTER_NAME = "pull_twitter";
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception {
@@ -15,18 +36,8 @@
}
@Override
- public AdapterType getAdapterType() {
- return AdapterType.EXTERNAL_DATASET;
- }
-
- @Override
public String getName() {
- return "pull_twitter";
- }
-
- @Override
- public FeedAdapterType getFeedAdapterType() {
- return FeedAdapterType.TYPED;
+ return PULL_BASED_TWITTER_ADAPTER_NAME;
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
index 1154d8a..bbbea38 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/RSSFeedAdapterFactory.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.adapter.factory;
import java.util.Map;
@@ -5,7 +19,13 @@
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.external.dataset.adapter.RSSFeedAdapter;
-public class RSSFeedAdapterFactory implements ITypedFeedDatasetAdapterFactory {
+/**
+ * Factory class for creating an instance of {@link RSSFeedAdapter}.
+ * RSSFeedAdapter provides the functionality of fetching an RSS-based feed.
+ */
+public class RSSFeedAdapterFactory implements ITypedDatasetAdapterFactory {
+
+ public static final String RSS_FEED_ADAPTER_NAME = "rss_feed";
@Override
public IDatasourceAdapter createAdapter(Map<String, String> configuration) throws Exception {
@@ -15,18 +35,8 @@
}
@Override
- public AdapterType getAdapterType() {
- return AdapterType.FEED;
- }
-
- @Override
public String getName() {
return "rss_feed";
}
- @Override
- public FeedAdapterType getFeedAdapterType() {
- return FeedAdapterType.TYPED;
- }
-
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java
index 6dbec48..fb4cc99 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/ExternalDataScanOperatorDescriptor.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -16,7 +16,7 @@
import java.util.Map;
-import edu.uci.ics.asterix.external.adapter.factory.IExternalDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
@@ -30,13 +30,18 @@
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+/*
+ * A single activity operator that provides the functionality of scanning data using an
+ * instance of the configured adapter.
+ */
public class ExternalDataScanOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
private static final long serialVersionUID = 1L;
private final String adapterFactory;
private final Map<String, String> adapterConfiguration;
private final IAType atype;
- private IExternalDatasetAdapterFactory datasourceAdapterFactory;
+ private IGenericDatasetAdapterFactory datasourceAdapterFactory;
public ExternalDataScanOperatorDescriptor(JobSpecification spec, String adapter, Map<String, String> arguments,
IAType atype, RecordDescriptor rDesc) {
@@ -79,11 +84,12 @@
}
+ @Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
throws HyracksDataException {
try {
- datasourceAdapterFactory = (IExternalDatasetAdapterFactory) Class.forName(adapterFactory).newInstance();
+ datasourceAdapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactory).newInstance();
} catch (Exception e) {
throw new HyracksDataException("initialization of adapter failed", e);
}
@@ -93,7 +99,7 @@
writer.open();
IDatasourceAdapter adapter = null;
try {
- adapter = ((IExternalDatasetAdapterFactory) datasourceAdapterFactory).createAdapter(
+ adapter = ((IGenericDatasetAdapterFactory) datasourceAdapterFactory).createAdapter(
adapterConfiguration, atype);
adapter.initialize(ctx);
adapter.start(partition, writer);
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java
new file mode 100644
index 0000000..2da4e76
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorDescriptor.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.ITypedDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * Operator responsible for ingesting data from an external source. This
+ * operator uses a (configurable) adapter associated with the feed dataset.
+ */
+public class FeedIntakeOperatorDescriptor extends
+ AbstractSingleActivityOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private final String adapterFactoryClassName;
+ private final Map<String, String> adapterConfiguration;
+ private final IAType atype;
+ private final FeedId feedId;
+
+ private transient IAdapterFactory datasourceAdapterFactory;
+
+ public FeedIntakeOperatorDescriptor(JobSpecification spec, FeedId feedId,
+ String adapter, Map<String, String> arguments, ARecordType atype,
+ RecordDescriptor rDesc) {
+ super(spec, 1, 1);
+ recordDescriptors[0] = rDesc;
+ this.adapterFactoryClassName = adapter;
+ this.adapterConfiguration = arguments;
+ this.atype = atype;
+ this.feedId = feedId;
+ }
+
+ public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+ IRecordDescriptorProvider recordDescProvider, final int partition,
+ int nPartitions) throws HyracksDataException {
+ ITypedDatasourceAdapter adapter;
+ try {
+ datasourceAdapterFactory = (IAdapterFactory) Class.forName(
+ adapterFactoryClassName).newInstance();
+ if (datasourceAdapterFactory instanceof IGenericDatasetAdapterFactory) {
+ adapter = (ITypedDatasourceAdapter) ((IGenericDatasetAdapterFactory) datasourceAdapterFactory)
+ .createAdapter(adapterConfiguration, atype);
+ } else if (datasourceAdapterFactory instanceof ITypedDatasetAdapterFactory) {
+ adapter = (ITypedDatasourceAdapter) ((ITypedDatasetAdapterFactory) datasourceAdapterFactory)
+ .createAdapter(adapterConfiguration);
+ } else {
+ throw new IllegalStateException(
+ " Unknown adapter factory type for "
+ + adapterFactoryClassName);
+ }
+ adapter.initialize(ctx);
+ } catch (Exception e) {
+ throw new HyracksDataException("initialization of adapter failed",
+ e);
+ }
+ return new FeedIntakeOperatorNodePushable(feedId, adapter, partition);
+ }
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java
similarity index 60%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorNodePushable.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java
index ff6216a..29fe72a 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorNodePushable.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedIntakeOperatorNodePushable.java
@@ -1,19 +1,35 @@
-package edu.uci.ics.asterix.feed.operator;
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
import java.nio.ByteBuffer;
import java.util.concurrent.LinkedBlockingQueue;
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.feed.comm.AlterFeedMessage;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.AlterFeedMessage;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.feed.mgmt.FeedSystemProvider;
-import edu.uci.ics.asterix.feed.mgmt.IFeedManager;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+/**
+ * The runtime for {@link FeedIntakeOperatorDescriptor}.
+ */
public class FeedIntakeOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
private final IDatasourceAdapter adapter;
@@ -26,7 +42,7 @@
public FeedIntakeOperatorNodePushable(FeedId feedId, IDatasourceAdapter adapter, int partition) {
this.adapter = adapter;
this.partition = partition;
- this.feedManager = (IFeedManager) FeedSystemProvider.getFeedManager();
+ this.feedManager = (IFeedManager) FeedManager.INSTANCE;
this.feedId = feedId;
inbox = new LinkedBlockingQueue<IFeedMessage>();
}
@@ -36,21 +52,25 @@
if (adapter instanceof IManagedFeedAdapter) {
feedInboxMonitor = new FeedInboxMonitor((IManagedFeedAdapter) adapter, inbox, partition);
feedInboxMonitor.start();
- feedManager.registerFeedOperatorMsgQueue(feedId, inbox);
+ feedManager.registerFeedMsgQueue(feedId, inbox);
}
writer.open();
try {
adapter.start(partition, writer);
} catch (Exception e) {
e.printStackTrace();
- // we do not throw an exception, but allow the operator to close
- // gracefully
- // Throwing an exception here would result in a job abort and a
- // transaction roll back
- // that undoes all the work done so far.
+ throw new HyracksDataException(e);
+ /*
+                Throwing an exception here results in a job abort and a
+                transaction rollback that undoes all the work done so far;
+                previously the operator was instead allowed to close gracefully.
+ */
} finally {
writer.close();
+ if (adapter instanceof IManagedFeedAdapter) {
+ feedManager.unregisterFeedMsgQueue(feedId, inbox);
+ }
}
}
@@ -66,7 +86,7 @@
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- // TODO Auto-generated method stub
+ // do nothing
}
}
@@ -86,17 +106,11 @@
try {
IFeedMessage feedMessage = inbox.take();
switch (feedMessage.getMessageType()) {
- case SUSPEND:
- adapter.suspend();
- break;
- case RESUME:
- adapter.resume();
- break;
case STOP:
adapter.stop();
break;
case ALTER:
- ((IMutableFeedAdapter) adapter).alter(((AlterFeedMessage) feedMessage).getAlteredConfParams());
+ adapter.alter(((AlterFeedMessage) feedMessage).getAlteredConfParams());
break;
}
} catch (InterruptedException ie) {
@@ -107,4 +121,4 @@
}
}
-}
\ No newline at end of file
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java
new file mode 100644
index 0000000..d0dc5ca
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorDescriptor.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
+
+import java.util.List;
+
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+/**
+ * Sends a control message to the registered message queue for the feed specified by its feedId.
+ */
+public class FeedMessageOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private final FeedId feedId;
+ private final List<IFeedMessage> feedMessages;
+ private final boolean sendToAll = true;
+
+ public FeedMessageOperatorDescriptor(JobSpecification spec, String dataverse, String dataset,
+ List<IFeedMessage> feedMessages) {
+ super(spec, 0, 1);
+ this.feedId = new FeedId(dataverse, dataset);
+ this.feedMessages = feedMessages;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+ return new FeedMessageOperatorNodePushable(ctx, feedId, feedMessages, sendToAll, partition, nPartitions);
+ }
+
+}
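
The descriptor above relies on a feed manager that keeps, per FeedId, the message queues registered by intake runtimes and delivers each control message to them. A minimal, self-contained sketch of that registry idea, using generic stand-in types rather than the FeedId/IFeedMessage classes from this patch:

    import java.util.List;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.CopyOnWriteArrayList;
    import java.util.concurrent.LinkedBlockingQueue;

    // K stands in for FeedId, M for IFeedMessage.
    public class FeedMessageRegistrySketch<K, M> {
        private final ConcurrentHashMap<K, List<LinkedBlockingQueue<M>>> queues =
                new ConcurrentHashMap<K, List<LinkedBlockingQueue<M>>>();

        // Called by an intake runtime when it starts consuming a feed.
        public void register(K feedId, LinkedBlockingQueue<M> inbox) {
            List<LinkedBlockingQueue<M>> list = queues.get(feedId);
            if (list == null) {
                list = new CopyOnWriteArrayList<LinkedBlockingQueue<M>>();
                List<LinkedBlockingQueue<M>> existing = queues.putIfAbsent(feedId, list);
                if (existing != null) {
                    list = existing;
                }
            }
            list.add(inbox);
        }

        // Called by the intake runtime when it stops consuming the feed.
        public void unregister(K feedId, LinkedBlockingQueue<M> inbox) {
            List<LinkedBlockingQueue<M>> list = queues.get(feedId);
            if (list != null) {
                list.remove(inbox);
            }
        }

        // Called by the message operator: every inbox registered for the feed receives the message.
        public void deliver(K feedId, M message) throws InterruptedException {
            List<LinkedBlockingQueue<M>> list = queues.get(feedId);
            if (list != null) {
                for (LinkedBlockingQueue<M> inbox : list) {
                    inbox.put(message);
                }
            }
        }
    }
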
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java
new file mode 100644
index 0000000..d03eeaa
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/data/operator/FeedMessageOperatorNodePushable.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.data.operator;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedManager;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
+/**
+ * Runtime for the {@link FeedMessageOperatorDescriptor}.
+ */
+public class FeedMessageOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+
+ private final FeedId feedId;
+ private final List<IFeedMessage> feedMessages;
+ private IFeedManager feedManager;
+
+ public FeedMessageOperatorNodePushable(IHyracksTaskContext ctx, FeedId feedId, List<IFeedMessage> feedMessages,
+ boolean applyToAll, int partition, int nPartitions) {
+ this.feedId = feedId;
+ if (applyToAll) {
+ this.feedMessages = feedMessages;
+ } else {
+ this.feedMessages = new ArrayList<IFeedMessage>();
+            this.feedMessages.add(feedMessages.get(partition));
+ }
+ feedManager = (IFeedManager) FeedManager.INSTANCE;
+ }
+
+ @Override
+ public void initialize() throws HyracksDataException {
+ try {
+ writer.open();
+ for (IFeedMessage feedMessage : feedMessages) {
+ feedManager.deliverMessage(feedId, feedMessage);
+ }
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ } finally {
+ writer.close();
+ }
+ }
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java
index 64c8853..440ee8c 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractDatasourceAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -34,66 +34,60 @@
*/
public abstract class AbstractDatasourceAdapter implements IDatasourceAdapter {
- private static final long serialVersionUID = -3510610289692452466L;
+ private static final long serialVersionUID = 1L;
- protected Map<String, String> configuration;
- protected transient AlgebricksPartitionConstraint partitionConstraint;
- protected IAType atype;
- protected IHyracksTaskContext ctx;
- protected AdapterType adapterType;
- protected boolean typeInfoRequired = false;
-
-
- protected static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
- static {
- typeToValueParserFactMap.put(ATypeTag.INT32,
- IntegerParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.FLOAT,
- FloatParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.DOUBLE,
- DoubleParserFactory.INSTANCE);
- typeToValueParserFactMap
- .put(ATypeTag.INT64, LongParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.STRING,
- UTF8StringParserFactory.INSTANCE);
- }
+ protected Map<String, String> configuration;
+ protected transient AlgebricksPartitionConstraint partitionConstraint;
+ protected IAType atype;
+ protected IHyracksTaskContext ctx;
+ protected AdapterType adapterType;
- protected static final HashMap<String, String> formatToParserFactoryMap = new HashMap<String, String>();
+ protected static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
+ static {
+ typeToValueParserFactMap.put(ATypeTag.INT32, IntegerParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.FLOAT, FloatParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.DOUBLE, DoubleParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.INT64, LongParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.STRING, UTF8StringParserFactory.INSTANCE);
+ }
- public static final String KEY_FORMAT = "format";
- public static final String KEY_PARSER_FACTORY = "parser";
+ protected static final Map<String, String> formatToParserFactoryMap = initializeFormatParserFactoryMap();
- public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
- public static final String FORMAT_ADM = "adm";
+ public static final String KEY_FORMAT = "format";
+ public static final String KEY_PARSER_FACTORY = "parser";
+ public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
+ public static final String FORMAT_ADM = "adm";
- static {
- formatToParserFactoryMap
- .put(FORMAT_DELIMITED_TEXT,
- "edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory");
- formatToParserFactoryMap
- .put(FORMAT_ADM,
- "edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory");
+ private static Map<String, String> initializeFormatParserFactoryMap() {
+ Map<String, String> map = new HashMap<String, String>();
+ map.put(FORMAT_DELIMITED_TEXT, "edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory");
+ map.put(FORMAT_ADM, "edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory");
+ return map;
+ }
- }
+ /**
+ * Get the partition constraint chosen by the adapter.
+ * An adapter may have preferences as to where it needs to be instantiated and used.
+ */
+ public abstract AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
- public AlgebricksPartitionConstraint getPartitionConstraint() {
- return partitionConstraint;
- }
+ /**
+ * Get the configured value from the adapter configuration parameters, corresponding to an attribute.
+ *
+ * @param attribute
+ * The attribute whose value needs to be obtained.
+ */
+ public String getAdapterProperty(String attribute) {
+ return configuration.get(attribute);
+ }
- public String getAdapterProperty(String attribute) {
- return configuration.get(attribute);
- }
-
- public Map<String, String> getConfiguration() {
- return configuration;
- }
-
- public void setAdapterProperty(String property, String value) {
- configuration.put(property, value);
- }
-
- public boolean isTypeInfoRequired() {
- return typeInfoRequired;
+ /**
+ * Get the adapter configuration parameters.
+ *
+ * @return A Map<String,String> instance representing the adapter configuration.
+ */
+ public Map<String, String> getConfiguration() {
+ return configuration;
}
}
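
The refactoring above replaces a static initializer block with a private method that builds the format-to-parser-factory map. A self-contained sketch of the same pattern, with illustrative class and factory names, showing how the configured "format" key would be resolved against such a map:

import java.util.HashMap;
import java.util.Map;

// Sketch of the "build the lookup map in a private static method" pattern
// used by AbstractDatasourceAdapter (class and factory names are illustrative).
public class FormatRegistrySketch {

    public static final String KEY_FORMAT = "format";
    public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
    public static final String FORMAT_ADM = "adm";

    private static final Map<String, String> FORMAT_TO_PARSER_FACTORY = initializeFormatParserFactoryMap();

    private static Map<String, String> initializeFormatParserFactoryMap() {
        Map<String, String> map = new HashMap<String, String>();
        map.put(FORMAT_DELIMITED_TEXT, "example.DelimitedTupleParserFactory");
        map.put(FORMAT_ADM, "example.AdmTupleParserFactory");
        return map;
    }

    // Resolves the parser factory class name for the configured format.
    static String resolveParserFactory(Map<String, String> configuration) {
        String format = configuration.get(KEY_FORMAT);
        String factory = FORMAT_TO_PARSER_FACTORY.get(format);
        if (factory == null) {
            throw new IllegalArgumentException("format " + format + " not supported");
        }
        return factory;
    }
}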
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractFeedDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractFeedDatasourceAdapter.java
deleted file mode 100644
index 8f2a896..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AbstractFeedDatasourceAdapter.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import edu.uci.ics.asterix.om.types.ARecordType;
-
-public abstract class AbstractFeedDatasourceAdapter extends AbstractDatasourceAdapter implements IFeedDatasourceAdapter {
-
- protected AdapterDataFlowType adapterDataFlowType;
- protected ARecordType adapterOutputType;
-
- public AdapterDataFlowType getAdapterDataFlowType() {
- return adapterDataFlowType;
- }
-
- public ARecordType getAdapterOutputType() {
- return adapterOutputType;
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java
index 1a6be25..ac36733 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/AdapterIdentifier.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -16,8 +16,13 @@
import java.io.Serializable;
+/**
+ * A unique identifier for a datasource adapter.
+ */
public class AdapterIdentifier implements Serializable {
+ private static final long serialVersionUID = 1L;
+
private final String namespace;
private final String adapterName;
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
index 3760d56..3898f7e 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.external.dataset.adapter;
import java.util.ArrayList;
@@ -5,19 +19,20 @@
import java.util.List;
import java.util.Map;
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
+import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-public class CNNFeedAdapter extends RSSFeedAdapter implements IDatasourceAdapter, IMutableFeedAdapter {
+/**
+ * An adapter that fetches news feeds from the CNN feed service.
+ * The adapter provides the news feed content as ADM records.
+ */
+public class CNNFeedAdapter extends RSSFeedAdapter implements IDatasourceAdapter, IManagedFeedAdapter {
private static final long serialVersionUID = 2523303758114582251L;
private List<String> feedURLs = new ArrayList<String>();
- private String id_prefix = "";
+ private static Map<String, String> topicFeeds = new HashMap<String, String>();
public static final String KEY_RSS_URL = "topic";
public static final String KEY_INTERVAL = "interval";
-
- private static Map<String, String> topicFeeds = new HashMap<String, String>();
-
public static final String TOP_STORIES = "topstories";
public static final String WORLD = "world";
public static final String US = "us";
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
index dde273e..e46705d 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/FileSystemBasedAdapter.java
@@ -16,14 +16,15 @@
import java.io.IOException;
import java.io.InputStream;
-import java.lang.reflect.Constructor;
import java.util.Map;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.runtime.operators.file.AdmSchemafullRecordParserFactory;
import edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
@@ -33,10 +34,13 @@
public abstract class FileSystemBasedAdapter extends AbstractDatasourceAdapter {
- protected boolean userDefinedParser = false;
- protected String parserFactoryClassname;
+ private static final long serialVersionUID = 1L;
+
+ protected ITupleParserFactory parserFactory;
+ protected ITupleParser parser;
public static final String KEY_DELIMITER = "delimiter";
+ public static final String KEY_PATH = "path";
public abstract InputStream getInputStream(int partition) throws IOException;
@@ -44,13 +48,10 @@
this.atype = atype;
}
- public FileSystemBasedAdapter() {
- }
-
@Override
public void start(int partition, IFrameWriter writer) throws Exception {
InputStream in = getInputStream(partition);
- ITupleParser parser = getTupleParser();
+ parser = getTupleParser();
parser.parse(in, writer);
}
@@ -63,44 +64,33 @@
@Override
public abstract AdapterType getAdapterType();
+ @Override
+ public abstract AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
+
protected ITupleParser getTupleParser() throws Exception {
- ITupleParser parser = null;
- if (userDefinedParser) {
- Class tupleParserFactoryClass = Class.forName(parserFactoryClassname);
- ITupleParserFactory parserFactory = (ITupleParserFactory) tupleParserFactoryClass.newInstance();
- parser = parserFactory.createTupleParser(ctx);
- } else {
- if (FORMAT_DELIMITED_TEXT.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
- parser = getDelimitedDataTupleParser((ARecordType) atype);
-
- } else if (FORMAT_ADM.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
- parser = getADMDataTupleParser((ARecordType) atype);
- } else {
- throw new IllegalArgumentException(" format " + configuration.get(KEY_FORMAT) + " not supported");
- }
- }
- return parser;
-
+ return parserFactory.createTupleParser(ctx);
}
protected void configureFormat() throws Exception {
- parserFactoryClassname = configuration.get(KEY_PARSER_FACTORY);
- userDefinedParser = (parserFactoryClassname != null);
-
+ String parserFactoryClassname = configuration.get(KEY_PARSER_FACTORY);
if (parserFactoryClassname == null) {
- if (FORMAT_DELIMITED_TEXT.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
- parserFactoryClassname = formatToParserFactoryMap.get(FORMAT_DELIMITED_TEXT);
+ String specifiedFormat = configuration.get(KEY_FORMAT);
+ if (specifiedFormat == null) {
+ throw new IllegalArgumentException("Unspecified data format");
+ } else if (FORMAT_DELIMITED_TEXT.equalsIgnoreCase(specifiedFormat)) {
+ parserFactory = getDelimitedDataTupleParserFactory((ARecordType) atype);
} else if (FORMAT_ADM.equalsIgnoreCase(configuration.get(KEY_FORMAT))) {
- parserFactoryClassname = formatToParserFactoryMap.get(FORMAT_ADM);
+ parserFactory = getADMDataTupleParserFactory((ARecordType) atype);
} else {
throw new IllegalArgumentException(" format " + configuration.get(KEY_FORMAT) + " not supported");
}
+ } else {
+ parserFactory = (ITupleParserFactory) Class.forName(parserFactoryClassname).newInstance();
}
}
- protected ITupleParser getDelimitedDataTupleParser(ARecordType recordType) throws AsterixException {
- ITupleParser parser;
+ protected ITupleParserFactory getDelimitedDataTupleParserFactory(ARecordType recordType) throws AsterixException {
int n = recordType.getFieldTypes().length;
IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
for (int i = 0; i < n; i++) {
@@ -117,17 +107,12 @@
}
Character delimiter = delimiterValue.charAt(0);
- parser = new NtDelimitedDataTupleParserFactory(recordType, fieldParserFactories, delimiter)
- .createTupleParser(ctx);
- return parser;
+ return new NtDelimitedDataTupleParserFactory(recordType, fieldParserFactories, delimiter);
}
- protected ITupleParser getADMDataTupleParser(ARecordType recordType) throws AsterixException {
+ protected ITupleParserFactory getADMDataTupleParserFactory(ARecordType recordType) throws AsterixException {
try {
- Class tupleParserFactoryClass = Class.forName(parserFactoryClassname);
- Constructor ctor = tupleParserFactoryClass.getConstructor(ARecordType.class);
- ITupleParserFactory parserFactory = (ITupleParserFactory) ctor.newInstance(atype);
- return parserFactory.createTupleParser(ctx);
+ return new AdmSchemafullRecordParserFactory(recordType);
} catch (Exception e) {
throw new AsterixException(e);
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
index f7a79cd..1e05b2f 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -18,7 +18,6 @@
import java.io.IOException;
import java.io.InputStream;
import java.net.UnknownHostException;
-import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -28,8 +27,6 @@
import java.util.logging.Level;
import java.util.logging.Logger;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Counters.Counter;
import org.apache.hadoop.mapred.InputSplit;
@@ -40,32 +37,39 @@
import org.apache.hadoop.mapred.TextInputFormat;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.runtime.util.AsterixRuntimeUtil;
+import edu.uci.ics.asterix.om.util.AsterixRuntimeUtil;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.hadoop.util.InputSplitsProxy;
+/**
+ * Provides functionality for fetching external data stored in an HDFS instance.
+ */
+@SuppressWarnings({ "deprecation", "rawtypes" })
public class HDFSAdapter extends FileSystemBasedAdapter {
+ private static final long serialVersionUID = 1L;
private static final Logger LOGGER = Logger.getLogger(HDFSAdapter.class.getName());
+ public static final String KEY_HDFS_URL = "hdfs";
+ public static final String KEY_INPUT_FORMAT = "input-format";
+ public static final String INPUT_FORMAT_TEXT = "text-input-format";
+ public static final String INPUT_FORMAT_SEQUENCE = "sequence-input-format";
+
private Object[] inputSplits;
private transient JobConf conf;
private InputSplitsProxy inputSplitsProxy;
- private static final Map<String, String> formatClassNames = new HashMap<String, String>();
+ private static final Map<String, String> formatClassNames = initInputFormatMap();
- public static final String KEY_HDFS_URL = "hdfs";
- public static final String KEY_HDFS_PATH = "path";
- public static final String KEY_INPUT_FORMAT = "input-format";
-
- public static final String INPUT_FORMAT_TEXT = "text-input-format";
- public static final String INPUT_FORMAT_SEQUENCE = "sequence-input-format";
-
- static {
+ private static Map<String, String> initInputFormatMap() {
+ Map<String, String> formatClassNames = new HashMap<String, String>();
formatClassNames.put(INPUT_FORMAT_TEXT, "org.apache.hadoop.mapred.TextInputFormat");
formatClassNames.put(INPUT_FORMAT_SEQUENCE, "org.apache.hadoop.mapred.SequenceFileInputFormat");
+ return formatClassNames;
}
public HDFSAdapter(IAType atype) {
@@ -77,77 +81,89 @@
configuration = arguments;
configureFormat();
configureJobConf();
- configurePartitionConstraint();
+ configureSplits();
+ }
+
+ private void configureSplits() throws IOException {
+ if (inputSplitsProxy == null) {
+ inputSplits = conf.getInputFormat().getSplits(conf, 0);
+ }
+ inputSplitsProxy = new InputSplitsProxy(conf, inputSplits);
}
private void configurePartitionConstraint() throws Exception {
- AlgebricksAbsolutePartitionConstraint absPartitionConstraint;
List<String> locations = new ArrayList<String>();
Random random = new Random();
- boolean couldConfigureLocationConstraints = true;
- if (inputSplitsProxy == null) {
- InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, 0);
- try {
- for (InputSplit inputSplit : inputSplits) {
- String[] dataNodeLocations = inputSplit.getLocations();
- // loop over all replicas until a split location coincides
- // with an asterix datanode location
- for (String datanodeLocation : dataNodeLocations) {
- Set<String> nodeControllersAtLocation = AsterixRuntimeUtil
- .getNodeControllersOnHostName(datanodeLocation);
- if (nodeControllersAtLocation == null || nodeControllersAtLocation.size() == 0) {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.log(Level.INFO, "No node controller found at " + datanodeLocation
- + " will look at replica location");
- }
- couldConfigureLocationConstraints = false;
- } else {
- int locationIndex = random.nextInt(nodeControllersAtLocation.size());
- String chosenLocation = (String) nodeControllersAtLocation.toArray()[locationIndex];
- locations.add(chosenLocation);
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.log(Level.INFO, "split : " + inputSplit + " to be processed by :"
- + chosenLocation);
- }
- couldConfigureLocationConstraints = true;
- break;
+ boolean couldConfigureLocationConstraints = false;
+ try {
+ Map<String, Set<String>> nodeControllers = AsterixRuntimeUtil.getNodeControllerMap();
+ for (Object inputSplit : inputSplits) {
+ String[] dataNodeLocations = ((InputSplit) inputSplit).getLocations();
+ if (dataNodeLocations == null || dataNodeLocations.length == 0) {
+ throw new IllegalArgumentException("No datanode locations found: check hdfs path");
+ }
+
+ // loop over all replicas until a split location coincides
+ // with an asterix datanode location
+ for (String datanodeLocation : dataNodeLocations) {
+ Set<String> nodeControllersAtLocation = null;
+ try {
+ nodeControllersAtLocation = nodeControllers.get(AsterixRuntimeUtil
+ .getIPAddress(datanodeLocation));
+ } catch (UnknownHostException uhe) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.log(Level.WARNING, "Unknown host :" + datanodeLocation);
}
+ continue;
}
-
- // none of the replica locations coincides with an Asterix
- // node controller location.
- if (!couldConfigureLocationConstraints) {
- List<String> allNodeControllers = AsterixRuntimeUtil.getAllNodeControllers();
- int locationIndex = random.nextInt(allNodeControllers.size());
- String chosenLocation = allNodeControllers.get(locationIndex);
- locations.add(chosenLocation);
-
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.log(Level.INFO, "No local node controller found to process split : " + inputSplit
- + " will be processed by a remote node controller:" + chosenLocation);
+ if (nodeControllersAtLocation == null || nodeControllersAtLocation.size() == 0) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.log(Level.WARNING, "No node controller found at " + datanodeLocation
+ + " will look at replica location");
}
+ couldConfigureLocationConstraints = false;
+ } else {
+ int locationIndex = random.nextInt(nodeControllersAtLocation.size());
+ String chosenLocation = (String) nodeControllersAtLocation.toArray()[locationIndex];
+ locations.add(chosenLocation);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.log(Level.INFO, "split : " + inputSplit + " to be processed by :" + chosenLocation);
+ }
+ couldConfigureLocationConstraints = true;
break;
}
}
- if (couldConfigureLocationConstraints) {
- partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
- } else {
- partitionConstraint = new AlgebricksCountPartitionConstraint(inputSplits.length);
+
+ /* none of the replica locations coincides with an Asterix
+ node controller location.
+ */
+ if (!couldConfigureLocationConstraints) {
+ List<String> allNodeControllers = AsterixRuntimeUtil.getAllNodeControllers();
+ int locationIndex = random.nextInt(allNodeControllers.size());
+ String chosenLocation = allNodeControllers.get(locationIndex);
+ locations.add(chosenLocation);
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.log(Level.SEVERE, "No local node controller found to process split : " + inputSplit
+ + " will be processed by a remote node controller:" + chosenLocation);
+ }
}
- } catch (UnknownHostException e) {
- partitionConstraint = new AlgebricksCountPartitionConstraint(inputSplits.length);
}
- inputSplitsProxy = new InputSplitsProxy(conf, inputSplits);
+ partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
+ } catch (Exception e) {
+ if (LOGGER.isLoggable(Level.SEVERE)) {
+ LOGGER.log(Level.SEVERE, "Encountered exception :" + e + " using count constraints");
+ }
+ partitionConstraint = new AlgebricksCountPartitionConstraint(inputSplits.length);
}
}
private JobConf configureJobConf() throws Exception {
conf = new JobConf();
- conf.set("fs.default.name", configuration.get(KEY_HDFS_URL));
+ conf.set("fs.default.name", configuration.get(KEY_HDFS_URL).trim());
conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
conf.setClassLoader(HDFSAdapter.class.getClassLoader());
- conf.set("mapred.input.dir", configuration.get(KEY_HDFS_PATH));
- conf.set("mapred.input.format.class", formatClassNames.get(configuration.get(KEY_INPUT_FORMAT)));
+ conf.set("mapred.input.dir", configuration.get(KEY_PATH).trim());
+ conf.set("mapred.input.format.class", formatClassNames.get(configuration.get(KEY_INPUT_FORMAT).trim()));
return conf;
}
@@ -199,11 +215,10 @@
return reporter;
}
+ @SuppressWarnings("unchecked")
@Override
public InputStream getInputStream(int partition) throws IOException {
- Path path = new Path(inputSplits[partition].toString());
try {
- FileSystem fs = FileSystem.get(conf);
InputStream inputStream;
if (conf.getInputFormat() instanceof SequenceFileInputFormat) {
SequenceFileInputFormat format = (SequenceFileInputFormat) conf.getInputFormat();
@@ -227,61 +242,71 @@
}
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ if (partitionConstraint == null) {
+ configurePartitionConstraint();
+ }
+ return partitionConstraint;
+ }
+
}
class HDFSStream extends InputStream {
- private ByteBuffer buffer;
- private int capacity;
- private RecordReader reader;
- private boolean readNext = true;
+ private RecordReader<Object, Text> reader;
private final Object key;
private final Text value;
+ private boolean hasMore = false;
+ private static final int EOL = "\n".getBytes()[0];
+ private Text pendingValue = null;
- public HDFSStream(RecordReader reader, IHyracksTaskContext ctx) throws Exception {
- capacity = ctx.getFrameSize();
- buffer = ByteBuffer.allocate(capacity);
+ public HDFSStream(RecordReader<Object, Text> reader, IHyracksTaskContext ctx) throws Exception {
this.reader = reader;
key = reader.createKey();
try {
value = (Text) reader.createValue();
} catch (ClassCastException cce) {
- throw new Exception("context is not of type org.apache.hadoop.io.Text"
+ throw new Exception("value is not of type org.apache.hadoop.io.Text"
+ " type not supported in sequence file format", cce);
}
- initialize();
- }
-
- private void initialize() throws Exception {
- boolean hasMore = reader.next(key, value);
- if (!hasMore) {
- buffer.limit(0);
- } else {
- buffer.position(0);
- buffer.limit(capacity);
- buffer.put(value.getBytes());
- buffer.put("\n".getBytes());
- buffer.flip();
- }
}
@Override
- public int read() throws IOException {
- if (!buffer.hasRemaining()) {
- boolean hasMore = reader.next(key, value);
- if (!hasMore) {
- return -1;
- }
- buffer.position(0);
- buffer.limit(capacity);
- buffer.put(value.getBytes());
- buffer.put("\n".getBytes());
- buffer.flip();
- return buffer.get();
- } else {
- return buffer.get();
+ public int read(byte[] buffer, int offset, int len) throws IOException {
+ int numBytes = 0;
+ if (pendingValue != null) {
+ System.arraycopy(pendingValue.getBytes(), 0, buffer, offset + numBytes, pendingValue.getLength());
+ buffer[offset + numBytes + pendingValue.getLength()] = (byte) EOL;
+ numBytes += pendingValue.getLength() + 1;
+ pendingValue = null;
}
+ while (numBytes < len) {
+ hasMore = reader.next(key, value);
+ if (!hasMore) {
+ return (numBytes == 0) ? -1 : numBytes;
+ }
+ int sizeOfNextTuple = value.getLength() + 1;
+ if (numBytes + sizeOfNextTuple > len) {
+ // the tuple cannot be added to the current buffer,
+ // but the reader has already moved past the fetched tuple;
+ // store it so a subsequent read call can return it.
+ pendingValue = value;
+ break;
+ } else {
+ System.arraycopy(value.getBytes(), 0, buffer, offset + numBytes, value.getLength());
+ buffer[offset + numBytes + value.getLength()] = (byte) EOL;
+ numBytes += sizeOfNextTuple;
+ }
+ }
+ return numBytes;
+ }
+
+ @Override
+ public int read() throws IOException {
+ throw new NotImplementedException("Use read(byte[], int, int)");
}
}
\ No newline at end of file
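
The rewritten HDFSStream.read(byte[], int, int) copies whole records (plus a trailing newline) into the caller's buffer and parks a record that does not fit as pendingValue for the next call. Below is a simplified, self-contained sketch of that carry-over technique over a plain Iterator<String>; the class and field names are illustrative, not the Hadoop RecordReader API.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;

// Simplified sketch of the record carry-over technique used by HDFSStream:
// whole records plus a trailing newline are copied into the caller's buffer,
// and a record that does not fit is held back for the next read() call.
// Like the original, it assumes the buffer can hold at least one record.
public class RecordStreamSketch extends InputStream {

    private static final byte EOL = (byte) '\n';

    private final Iterator<String> records;
    private byte[] pending; // record fetched from the source but not yet returned

    public RecordStreamSketch(Iterator<String> records) {
        this.records = records;
    }

    @Override
    public int read(byte[] buffer, int offset, int len) throws IOException {
        int numBytes = 0;
        if (pending != null) {
            System.arraycopy(pending, 0, buffer, offset, pending.length);
            buffer[offset + pending.length] = EOL;
            numBytes = pending.length + 1;
            pending = null;
        }
        while (numBytes < len) {
            if (!records.hasNext()) {
                return (numBytes == 0) ? -1 : numBytes;
            }
            byte[] record = records.next().getBytes(StandardCharsets.UTF_8);
            int sizeOfNextRecord = record.length + 1;
            if (numBytes + sizeOfNextRecord > len) {
                // the record has already been consumed from the source but does
                // not fit; park it so the next read() call can return it
                pending = record;
                break;
            }
            System.arraycopy(record, 0, buffer, offset + numBytes, record.length);
            buffer[offset + numBytes + record.length] = EOL;
            numBytes += sizeOfNextRecord;
        }
        return numBytes;
    }

    @Override
    public int read() throws IOException {
        throw new UnsupportedOperationException("Use read(byte[], int, int)");
    }
}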
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
index ffcc658..3731eba 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -17,11 +17,17 @@
import java.util.Map;
import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+/**
+ * Provides the functionality of fetching data, in the form of ADM records, from a Hive dataset.
+ */
public class HiveAdapter extends AbstractDatasourceAdapter {
+ private static final long serialVersionUID = 1L;
+
public static final String HIVE_DATABASE = "database";
public static final String HIVE_TABLE = "table";
public static final String HIVE_HOME = "hive-home";
@@ -56,7 +62,7 @@
tablePath = configuration.get(HIVE_WAREHOUSE_DIR) + "/" + tablePath + ".db" + "/"
+ configuration.get(HIVE_TABLE);
}
- configuration.put(HDFSAdapter.KEY_HDFS_PATH, tablePath);
+ configuration.put(HDFSAdapter.KEY_PATH, tablePath);
if (!configuration.get(KEY_FORMAT).equals(FORMAT_DELIMITED_TEXT)) {
throw new IllegalArgumentException("format" + configuration.get(KEY_FORMAT) + " is not supported");
}
@@ -81,4 +87,9 @@
hdfsAdapter.start(partition, writer);
}
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ return hdfsAdapter.getPartitionConstraint();
+ }
+
}
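
The HiveAdapter hunk above derives the HDFS location of a Hive table from the warehouse directory, the database name, and the table name. A small sketch of that path construction; the configuration key strings and the layout of the "default" database are assumptions, and only the <database>.db/<table> branch mirrors the hunk.

import java.util.HashMap;
import java.util.Map;

// Sketch of how HiveAdapter maps (warehouse dir, database, table) onto an HDFS path.
public class HiveTablePathSketch {

    static String tablePath(Map<String, String> conf) {
        String warehouseDir = conf.get("warehouse-dir"); // key name assumed
        String database = conf.get("database");
        String table = conf.get("table");
        if ("default".equals(database)) {
            return warehouseDir + "/" + table; // assumed layout for the default database
        }
        return warehouseDir + "/" + database + ".db" + "/" + table;
    }

    public static void main(String[] args) {
        Map<String, String> conf = new HashMap<String, String>();
        conf.put("warehouse-dir", "/user/hive/warehouse");
        conf.put("database", "sales");
        conf.put("table", "orders");
        System.out.println(tablePath(conf)); // /user/hive/warehouse/sales.db/orders
    }
}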
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java
index ccfb9bd..b0dc32f 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IDatasourceAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -29,7 +29,6 @@
*/
public interface IDatasourceAdapter extends Serializable {
-
/**
* An adapter can be used to read from an external data source and may also
* allow writing to the external data source. This enum type indicates the
@@ -46,7 +45,6 @@
READ_WRITE
}
-
/**
* Returns the type of adapter indicating if the adapter can be used for
* reading from an external data source or writing to an external data
@@ -75,19 +73,6 @@
public String getAdapterProperty(String propertyKey);
/**
- * Allows setting a configuration property of the adapter with a specified
- * value.
- *
- * @caller Used by the wrapper operator to modify the behavior of the
- * adapter, if required.
- * @param property
- * the property to be set
- * @param value
- * the value for the property
- */
- public void setAdapterProperty(String property, String value);
-
- /**
* Configures the IDatasourceAdapter instance.
*
* @caller Scenario 1) Called during compilation of DDL statement that
@@ -131,9 +116,8 @@
* @Caller The wrapper operator configures its partition constraints from
* the constraints obtained from the adapter.
*/
- public AlgebricksPartitionConstraint getPartitionConstraint();
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception;
-
/**
* Allows the adapter to establish connection with the external data source
* expressing intent for data and providing any configuration parameters
@@ -148,5 +132,18 @@
*/
public void initialize(IHyracksTaskContext ctx) throws Exception;
+ /**
+ * Triggers the adapter to begin ingestion of data from the external source.
+ *
+ * @param partition
+ * The adapter could be running with a degree of parallelism.
+ * partition corresponds to the i'th parallel instance.
+ * @param writer
+ * The instance of frame writer that the adapter writes frames to.
+ * The adapter packs the bytes fetched from the external source into
+ * frames and forwards the frames to an upstream receiving operator
+ * using this instance of IFrameWriter.
+ * @throws Exception
+ */
public void start(int partition, IFrameWriter writer) throws Exception;
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedDatasourceAdapter.java
deleted file mode 100644
index 282be78..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IFeedDatasourceAdapter.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import edu.uci.ics.asterix.om.types.ARecordType;
-
-public interface IFeedDatasourceAdapter extends IDatasourceAdapter {
-
- /**
- * Represents the kind of data exchange that happens between the adapter and
- * the external data source. The data exchange can be either pull based or
- * push based. In the former case (pull), the request for data transfer is
- * initiated by the adapter. In the latter case (push) the adapter is
- * required to submit an initial request to convey intent for data.
- * Subsequently all data transfer requests are initiated by the external
- * data source.
- */
- public enum AdapterDataFlowType {
- PULL,
- PUSH
- }
-
- /**
- * An adapter can be a pull or a push based adapter. This method returns the
- * kind of adapter, that is whether it is a pull based adapter or a push
- * based adapter.
- *
- * @caller Compiler or wrapper operator: Compiler uses this API to choose
- * the right wrapper (push-based) operator that wraps around the
- * adapter and provides an iterator interface. If we decide to form
- * a single operator that handles both pull and push based adapter
- * kinds, then this method will be used by the wrapper operator for
- * switching between the logic for interacting with a pull based
- * adapter versus a push based adapter.
- * @return AdapterDataFlowType
- */
- public AdapterDataFlowType getAdapterDataFlowType();
-
- public ARecordType getAdapterOutputType();
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
new file mode 100644
index 0000000..a1eb075
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import java.io.DataOutput;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+public interface IPullBasedFeedClient {
+
+ /**
+ * Writes the next fetched tuple into the provided instance of DataOutput.
+ *
+ * @param dataOutput
+ * The receiving channel for the feed client to write ADM records to.
+ * @return true if a record was written to the DataOutput instance;
+ * false if no record was written, indicating that no new data is available.
+ * @throws AsterixException
+ */
+ public boolean nextTuple(DataOutput dataOutput) throws AsterixException;
+
+ /**
+ * Provides logic for any corrective action that the feed client needs to execute
+ * upon encountering an exception.
+ *
+ * @param e
+ * The exception encountered during fetching of data from external source
+ * @throws AsterixException
+ */
+ public void resetOnFailure(Exception e) throws AsterixException;
+
+ /**
+ * Terminates the feed, that is, data ingestion ceases.
+ *
+ * @throws Exception
+ */
+ public void stop() throws Exception;
+
+}
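
A minimal sketch mirroring the IPullBasedFeedClient contract introduced above. To stay free of Asterix dependencies it does not implement the interface itself: an in-memory queue stands in for the external source and IOException stands in for AsterixException.

import java.io.DataOutput;
import java.io.IOException;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

// Stand-alone sketch mirroring the IPullBasedFeedClient contract.
public class InMemoryFeedClientSketch {

    private final Queue<String> source = new ConcurrentLinkedQueue<String>();
    private volatile boolean continueIngestion = true;

    public void publish(String record) {
        source.add(record);
    }

    // Returns true if a record was written to the output, false if no new data
    // is currently available (the same semantics documented for nextTuple above).
    public boolean nextTuple(DataOutput dataOutput) throws IOException {
        if (!continueIngestion) {
            return false;
        }
        String record = source.poll();
        if (record == null) {
            return false;
        }
        dataOutput.writeUTF(record);
        return true;
    }

    // Corrective action on failure; a real client would re-establish its
    // connection to the external source here.
    public void resetOnFailure(Exception e) {
    }

    // Ceases data ingestion.
    public void stop() {
        continueIngestion = false;
    }
}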
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java
new file mode 100644
index 0000000..3a4b97b
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/ITypedDatasourceAdapter.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import edu.uci.ics.asterix.om.types.ARecordType;
+
+/**
+ * Implemented by a datasource adapter that has a fixed output type.
+ * Example @see {PullBasedTwitterAdapter}
+ */
+public interface ITypedDatasourceAdapter extends IDatasourceAdapter {
+
+ public ARecordType getAdapterOutputType();
+
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
index 324e227..bcc90c8 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/NCFileSystemAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -23,75 +23,86 @@
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+/**
+ * An NCFileSystemAdapter reads external data residing on the local file
+ * system of a node controller (NC).
+ */
public class NCFileSystemAdapter extends FileSystemBasedAdapter {
- private static final long serialVersionUID = -4154256369973615710L;
- private FileSplit[] fileSplits;
+ private static final long serialVersionUID = 1L;
+ private FileSplit[] fileSplits;
- public static final String KEY_SPLITS = "path";
-
- public NCFileSystemAdapter(IAType atype) {
- super(atype);
- }
+ public NCFileSystemAdapter(IAType atype) {
+ super(atype);
+ }
- @Override
- public void configure(Map<String, String> arguments) throws Exception {
- this.configuration = arguments;
- String[] splits = arguments.get(KEY_SPLITS).split(",");
- configureFileSplits(splits);
- configurePartitionConstraint();
- configureFormat();
- }
+ @Override
+ public void configure(Map<String, String> arguments) throws Exception {
+ this.configuration = arguments;
+ String[] splits = arguments.get(KEY_PATH).split(",");
+ configureFileSplits(splits);
+ configureFormat();
+ }
- @Override
- public void initialize(IHyracksTaskContext ctx) throws Exception {
- this.ctx = ctx;
- }
+ @Override
+ public void initialize(IHyracksTaskContext ctx) throws Exception {
+ this.ctx = ctx;
+ }
- @Override
- public AdapterType getAdapterType() {
- return AdapterType.READ;
- }
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.READ;
+ }
- private void configureFileSplits(String[] splits) {
- if (fileSplits == null) {
- fileSplits = new FileSplit[splits.length];
- String nodeName;
- String nodeLocalPath;
- int count = 0;
- for (String splitPath : splits) {
- nodeName = splitPath.split(":")[0];
- nodeLocalPath = splitPath.split("://")[1];
- FileSplit fileSplit = new FileSplit(nodeName,
- new FileReference(new File(nodeLocalPath)));
- fileSplits[count++] = fileSplit;
- }
- }
- }
+ private void configureFileSplits(String[] splits) {
+ if (fileSplits == null) {
+ fileSplits = new FileSplit[splits.length];
+ String nodeName;
+ String nodeLocalPath;
+ int count = 0;
+ String trimmedValue;
+ for (String splitPath : splits) {
+ trimmedValue = splitPath.trim();
+ nodeName = trimmedValue.split(":")[0];
+ nodeLocalPath = trimmedValue.split("://")[1];
+ FileSplit fileSplit = new FileSplit(nodeName, new FileReference(new File(nodeLocalPath)));
+ fileSplits[count++] = fileSplit;
+ }
+ }
+ }
- private void configurePartitionConstraint() {
- String[] locs = new String[fileSplits.length];
- for (int i = 0; i < fileSplits.length; i++) {
- locs[i] = fileSplits[i].getNodeName();
- }
- partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locs);
- }
+ private void configurePartitionConstraint() {
+ String[] locs = new String[fileSplits.length];
+ for (int i = 0; i < fileSplits.length; i++) {
+ locs[i] = fileSplits[i].getNodeName();
+ }
+ partitionConstraint = new AlgebricksAbsolutePartitionConstraint(locs);
+ }
- @Override
- public InputStream getInputStream(int partition) throws IOException {
- FileSplit split = fileSplits[partition];
- File inputFile = split.getLocalFile().getFile();
- InputStream in;
- try {
- in = new FileInputStream(inputFile);
- return in;
- } catch (FileNotFoundException e) {
- throw new IOException(e);
- }
- }
+ @Override
+ public InputStream getInputStream(int partition) throws IOException {
+ FileSplit split = fileSplits[partition];
+ File inputFile = split.getLocalFile().getFile();
+ InputStream in;
+ try {
+ in = new FileInputStream(inputFile);
+ return in;
+ } catch (FileNotFoundException e) {
+ throw new IOException(e);
+ }
+ }
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ if (partitionConstraint == null) {
+ configurePartitionConstraint();
+ }
+ return partitionConstraint;
+ }
}
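
configureFileSplits above parses the comma-separated "path" property, where each entry has the form nodeName://local/path on that node. A trimmed-down sketch of that parsing, with a simple value class standing in for Hyracks' FileSplit:

import java.util.ArrayList;
import java.util.List;

// Sketch of the "path" parsing done in NCFileSystemAdapter.configureFileSplits:
// each comma-separated entry looks like nc1://data/tweets.adm, i.e.
// <node name>://<local path on that node>.
public class FileSplitParsingSketch {

    static final class Split {
        final String nodeName;
        final String localPath;

        Split(String nodeName, String localPath) {
            this.nodeName = nodeName;
            this.localPath = localPath;
        }
    }

    static List<Split> parse(String pathProperty) {
        List<Split> splits = new ArrayList<Split>();
        for (String entry : pathProperty.split(",")) {
            String trimmed = entry.trim();
            String nodeName = trimmed.split(":")[0];
            String localPath = trimmed.split("://")[1];
            splits.add(new Split(nodeName, localPath));
        }
        return splits;
    }

    public static void main(String[] args) {
        for (Split s : parse("nc1://data/a.adm, nc2://data/b.adm")) {
            System.out.println(s.nodeName + " -> " + s.localPath);
        }
    }
}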
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
index 326775a..38686c2 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
@@ -17,17 +17,25 @@
import java.nio.ByteBuffer;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.feed.intake.IPullBasedFeedClient;
+import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-public abstract class PullBasedAdapter extends AbstractFeedDatasourceAdapter implements IDatasourceAdapter {
+/**
+ * Acts as an abstract class for all pull-based external data adapters.
+ * Captures the common logic for obtaining bytes from an external source
+ * and packing them into frames as tuples.
+ */
+public abstract class PullBasedAdapter extends AbstractDatasourceAdapter implements ITypedDatasourceAdapter {
+
+ private static final long serialVersionUID = 1L;
protected ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(1);
protected IPullBasedFeedClient pullBasedFeedClient;
+ protected ARecordType adapterOutputType;
private FrameTupleAppender appender;
private ByteBuffer frame;
@@ -40,10 +48,18 @@
appender.reset(frame, true);
pullBasedFeedClient = getFeedClient(partition);
+ boolean moreData = false;
while (true) {
tupleBuilder.reset();
try {
- pullBasedFeedClient.nextTuple(tupleBuilder.getDataOutput()); // nextTuple is a blocking call.
+ moreData = pullBasedFeedClient.nextTuple(tupleBuilder.getDataOutput());
+ if (moreData) {
+ tupleBuilder.addFieldEndOffset();
+ appendTupleToFrame(writer);
+ } else {
+ FrameUtils.flushFrame(frame, writer);
+ break;
+ }
} catch (Exception failureException) {
try {
pullBasedFeedClient.resetOnFailure(failureException);
@@ -51,14 +67,15 @@
} catch (Exception recoveryException) {
throw new Exception(recoveryException);
}
-
}
- tupleBuilder.addFieldEndOffset();
- appendTupleToFrame(writer);
-
}
}
+ /**
+ * Allows an adapter to handle a runtime exception.
+ * @param e exception encountered during runtime
+ * @throws AsterixException
+ */
public void resetOnFailure(Exception e) throws AsterixException {
pullBasedFeedClient.resetOnFailure(e);
tupleBuilder.reset();
@@ -75,4 +92,9 @@
}
}
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return adapterOutputType;
+ }
+
}
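
The change to PullBasedAdapter.start makes the loop append a tuple only when the feed client reports new data, and flush the in-flight frame before exiting otherwise. A self-contained sketch of that control flow; a StringBuilder stands in for the frame and a small functional interface for IPullBasedFeedClient.

import java.util.Arrays;
import java.util.Iterator;

// Sketch of the pull loop introduced in PullBasedAdapter.start: a tuple is
// appended only when the client reports new data, and whatever has been
// buffered is flushed before exiting once the client runs dry.
public class PullLoopSketch {

    interface TupleSource {
        // mirrors IPullBasedFeedClient.nextTuple: false means no new data is available
        boolean nextTuple(StringBuilder out);
    }

    static void pump(TupleSource client, StringBuilder frame) {
        while (true) {
            StringBuilder tuple = new StringBuilder();
            boolean moreData = client.nextTuple(tuple);
            if (moreData) {
                frame.append(tuple).append('\n'); // appendTupleToFrame
            } else {
                System.out.print(frame); // FrameUtils.flushFrame
                break;
            }
        }
    }

    public static void main(String[] args) {
        final Iterator<String> data = Arrays.asList("t1", "t2", "t3").iterator();
        TupleSource client = new TupleSource() {
            @Override
            public boolean nextTuple(StringBuilder out) {
                if (!data.hasNext()) {
                    return false;
                }
                out.append(data.next());
                return true;
            }
        };
        pump(client, new StringBuilder());
    }
}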
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
new file mode 100644
index 0000000..17ecd86
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
+import edu.uci.ics.asterix.om.base.AMutableRecord;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public abstract class PullBasedFeedClient implements IPullBasedFeedClient {
+
+ protected ARecordSerializerDeserializer recordSerDe;
+ protected AMutableRecord mutableRecord;
+ protected boolean messageReceived;
+ protected boolean continueIngestion = true;
+
+ public abstract boolean setNextRecord() throws Exception;
+
+ @Override
+ public boolean nextTuple(DataOutput dataOutput) throws AsterixException {
+ try {
+ boolean newData = setNextRecord();
+ if (newData && continueIngestion) {
+ IAType t = mutableRecord.getType();
+ ATypeTag tag = t.getTypeTag();
+ try {
+ dataOutput.writeByte(tag.serialize());
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ recordSerDe.serialize(mutableRecord, dataOutput);
+ return true;
+ }
+ return false;
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+
+ }
+
+ @Override
+ public void stop() {
+ continueIngestion = false;
+ }
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
index a7fd3dc..ebfbcad 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -17,103 +17,92 @@
import java.util.HashMap;
import java.util.Map;
-import edu.uci.ics.asterix.feed.intake.IPullBasedFeedClient;
-import edu.uci.ics.asterix.feed.intake.PullBasedTwitterFeedClient;
import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-public class PullBasedTwitterAdapter extends PullBasedAdapter implements
- IManagedFeedAdapter, IMutableFeedAdapter {
+/**
+ * An adapter that provides the functionality of receiving tweets from the
+ * Twitter service in the form of ADM-formatted records.
+ */
+public class PullBasedTwitterAdapter extends PullBasedAdapter implements IManagedFeedAdapter {
- private int interval = 10;
- private boolean stopRequested = false;
- private boolean alterRequested = false;
- private Map<String, String> alteredParams = new HashMap<String, String>();
- private ARecordType recordType;
+
+ private static final long serialVersionUID = 1L;
+
+ public static final String QUERY = "query";
+ public static final String INTERVAL = "interval";
- private String[] fieldNames = { "id", "username", "location", "text",
- "timestamp" };
- private IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING,
- BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING };
+ private boolean alterRequested = false;
+ private Map<String, String> alteredParams = new HashMap<String, String>();
+ private ARecordType recordType;
- private PullBasedTwitterFeedClient tweetClient;
+ private PullBasedTwitterFeedClient tweetClient;
- public static final String QUERY = "query";
- public static final String INTERVAL = "interval";
+ @Override
+ public IPullBasedFeedClient getFeedClient(int partition) {
+ return tweetClient;
+ }
- @Override
- public IPullBasedFeedClient getFeedClient(int partition) {
- return tweetClient;
- }
+ @Override
+ public void configure(Map<String, String> arguments) throws Exception {
+ configuration = arguments;
+ String[] fieldNames = { "id", "username", "location", "text", "timestamp" };
+ IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
+ BuiltinType.ASTRING };
+ recordType = new ARecordType("FeedRecordType", fieldNames, fieldTypes, false);
+ }
- @Override
- public void configure(Map<String, String> arguments) throws Exception {
- configuration = arguments;
- partitionConstraint = new AlgebricksCountPartitionConstraint(1);
- interval = Integer.parseInt(arguments.get(INTERVAL));
- recordType = new ARecordType("FeedRecordType", fieldNames, fieldTypes,
- false);
- }
+ @Override
+ public void initialize(IHyracksTaskContext ctx) throws Exception {
+ this.ctx = ctx;
+ tweetClient = new PullBasedTwitterFeedClient(ctx, this);
+ }
- @Override
- public void initialize(IHyracksTaskContext ctx) throws Exception {
- this.ctx = ctx;
- tweetClient = new PullBasedTwitterFeedClient(ctx, this);
- }
+ @Override
+ public AdapterType getAdapterType() {
+ return AdapterType.READ;
+ }
- @Override
- public AdapterType getAdapterType() {
- return adapterType.READ;
- }
+ @Override
+ public void stop() {
+ tweetClient.stop();
+ }
- @Override
- public void suspend() throws Exception {
- // TODO Auto-generated method stub
+ @Override
+ public void alter(Map<String, String> properties) {
+ alterRequested = true;
+ this.alteredParams = properties;
+ }
- }
+ public boolean isAlterRequested() {
+ return alterRequested;
+ }
- @Override
- public void resume() throws Exception {
- // TODO Auto-generated method stub
+ public Map<String, String> getAlteredParams() {
+ return alteredParams;
+ }
- }
+ public void postAlteration() {
+ alteredParams = null;
+ alterRequested = false;
+ }
- @Override
- public void stop() throws Exception {
- stopRequested = true;
- }
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return recordType;
+ }
- public boolean isStopRequested() {
- return stopRequested;
- }
-
- @Override
- public void alter(Map<String, String> properties) throws Exception {
- alterRequested = true;
- this.alteredParams = properties;
- }
-
- public boolean isAlterRequested() {
- return alterRequested;
- }
-
- public Map<String, String> getAlteredParams() {
- return alteredParams;
- }
-
- public void postAlteration() {
- alteredParams = null;
- alterRequested = false;
- }
-
- @Override
- public ARecordType getAdapterOutputType() {
- return recordType;
- }
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ if (partitionConstraint == null) {
+ partitionConstraint = new AlgebricksCountPartitionConstraint(1);
+ }
+ return partitionConstraint;
+ }
}
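
PullBasedTwitterAdapter above reads its "query" and "interval" keys from the configuration map and builds a count partition constraint of 1 on demand. An illustrative configuration a caller might hand to configure(...); the values are made up.

import java.util.HashMap;
import java.util.Map;

// Illustrative configuration for PullBasedTwitterAdapter.configure(...).
// The key names come from the QUERY and INTERVAL constants above; the values are made up.
public class TwitterAdapterConfigSketch {
    public static void main(String[] args) {
        Map<String, String> configuration = new HashMap<String, String>();
        configuration.put("query", "asterixdb"); // search keywords (PullBasedTwitterAdapter.QUERY)
        configuration.put("interval", "10");     // seconds between fetch requests (PullBasedTwitterAdapter.INTERVAL)
        // adapter.configure(configuration) would then build the FeedRecordType
        // with the fields { id, username, location, text, timestamp }.
        System.out.println(configuration);
    }
}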
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedTwitterFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
similarity index 78%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedTwitterFeedClient.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
index c1ea800..2a07472 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedTwitterFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
@@ -1,4 +1,18 @@
-package edu.uci.ics.asterix.feed.intake;
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
import java.util.LinkedList;
import java.util.Map;
@@ -12,13 +26,17 @@
import twitter4j.TwitterFactory;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
-import edu.uci.ics.asterix.external.dataset.adapter.PullBasedTwitterAdapter;
import edu.uci.ics.asterix.om.base.AMutableRecord;
import edu.uci.ics.asterix.om.base.AMutableString;
import edu.uci.ics.asterix.om.base.IAObject;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+/**
+ * An implementation of @see {PullBasedFeedClient} for the Twitter service.
+ * The feed client fetches data from the Twitter service by sending requests at
+ * a regular (configurable) interval.
+ */
public class PullBasedTwitterFeedClient extends PullBasedFeedClient {
private String keywords;
@@ -54,7 +72,7 @@
private Tweet getNextTweet() throws TwitterException, InterruptedException {
if (tweetBuffer.isEmpty()) {
QueryResult result;
- Thread.currentThread().sleep(1000 * requestInterval);
+ Thread.sleep(1000 * requestInterval);
result = twitter.search(query);
tweetBuffer.addAll(result.getTweets());
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
index 124a9ed..611183c 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -19,24 +19,26 @@
import java.util.List;
import java.util.Map;
-import edu.uci.ics.asterix.feed.intake.IPullBasedFeedClient;
-import edu.uci.ics.asterix.feed.intake.RSSFeedClient;
import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
-import edu.uci.ics.asterix.feed.managed.adapter.IMutableFeedAdapter;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-public class RSSFeedAdapter extends PullBasedAdapter implements IManagedFeedAdapter, IMutableFeedAdapter {
+/**
+ * RSSFeedAdapter provides the functionality of fetching an RSS based feed.
+ */
+public class RSSFeedAdapter extends PullBasedAdapter implements IManagedFeedAdapter {
+
+ private static final long serialVersionUID = 1L;
private List<String> feedURLs = new ArrayList<String>();
private boolean isStopRequested = false;
private boolean isAlterRequested = false;
private Map<String, String> alteredParams = new HashMap<String, String>();
private String id_prefix = "";
- private int interval = 10;
private ARecordType recordType;
private IPullBasedFeedClient rssFeedClient;
@@ -53,40 +55,18 @@
}
@Override
- public void alter(Map<String, String> properties) throws Exception {
+ public void alter(Map<String, String> properties) {
isAlterRequested = true;
this.alteredParams = properties;
reconfigure(properties);
}
- public void postAlteration() {
- alteredParams = null;
- isAlterRequested = false;
- }
-
@Override
- public void suspend() throws Exception {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void resume() throws Exception {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void stop() throws Exception {
+ public void stop() {
isStopRequested = true;
}
@Override
- public AdapterDataFlowType getAdapterDataFlowType() {
- return AdapterDataFlowType.PULL;
- }
-
- @Override
public AdapterType getAdapterType() {
return AdapterType.READ;
}
@@ -151,4 +131,12 @@
return recordType;
}
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ if (partitionConstraint == null) {
+ configurePartitionConstraints();
+ }
+ return partitionConstraint;
+ }
+
}
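
The getPartitionConstraint() override added above computes the constraint lazily and caches it. A minimal sketch of that idiom follows; it is not part of the patch, the class and method names are hypothetical, and only the Algebricks constraint type is taken from the imports above.

import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;

public abstract class LazyConstraintAdapterSketch {

    private AlgebricksPartitionConstraint partitionConstraint;

    // Stands in for the constraint-building logic supplied by the adapter hierarchy.
    protected abstract AlgebricksPartitionConstraint buildPartitionConstraint() throws Exception;

    // Computed once on first request and reused for subsequent calls.
    public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
        if (partitionConstraint == null) {
            partitionConstraint = buildPartitionConstraint();
        }
        return partitionConstraint;
    }
}

If getPartitionConstraint() could be reached from multiple threads, the null check would need synchronization; a plain check is enough for a single-threaded caller.
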
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/RSSFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
similarity index 86%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/RSSFeedClient.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
index 9f04500..366b4af 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/RSSFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
@@ -1,4 +1,18 @@
-package edu.uci.ics.asterix.feed.intake;
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.dataset.adapter;
import java.net.MalformedURLException;
import java.net.URL;
@@ -15,12 +29,15 @@
import com.sun.syndication.fetcher.impl.HashMapFeedInfoCache;
import com.sun.syndication.fetcher.impl.HttpURLFeedFetcher;
-import edu.uci.ics.asterix.external.dataset.adapter.RSSFeedAdapter;
import edu.uci.ics.asterix.om.base.AMutableRecord;
import edu.uci.ics.asterix.om.base.AMutableString;
import edu.uci.ics.asterix.om.base.IAObject;
import edu.uci.ics.asterix.om.types.ARecordType;
+/**
+ * An implementation of {@link PullBasedFeedClient} responsible for
+ * fetching from an RSS feed source at a regular interval.
+ */
@SuppressWarnings("rawtypes")
public class RSSFeedClient extends PullBasedFeedClient {
@@ -93,6 +110,7 @@
}
}
+ @SuppressWarnings("unchecked")
private void fetchFeed() {
try {
System.err.println("Retrieving feed " + feedURL);
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/AlterFeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java
similarity index 67%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/AlterFeedMessage.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java
index d34af73..537bf07 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/AlterFeedMessage.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/AlterFeedMessage.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,31 +12,26 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.feed.comm;
+package edu.uci.ics.asterix.external.feed.lifecycle;
import java.util.Map;
+/**
+ * A feed control message containing the altered values for
+ * adapter configuration parameters. This message is dispatched
+ * to all runtime instances of the feed's adapter.
+ */
public class AlterFeedMessage extends FeedMessage {
+ private static final long serialVersionUID = 1L;
+
private final Map<String, String> alteredConfParams;
public AlterFeedMessage(Map<String, String> alteredConfParams) {
super(MessageType.ALTER);
- messageResponseMode = MessageResponseMode.SYNCHRONOUS;
this.alteredConfParams = alteredConfParams;
}
- public AlterFeedMessage(MessageResponseMode mode, Map<String, String> alteredConfParams) {
- super(MessageType.ALTER);
- messageResponseMode = mode;
- this.alteredConfParams = alteredConfParams;
- }
-
- @Override
- public MessageResponseMode getMessageResponseMode() {
- return messageResponseMode;
- }
-
@Override
public MessageType getMessageType() {
return MessageType.ALTER;
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedId.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java
similarity index 75%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedId.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java
index 19076c4..b1889ee 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedId.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedId.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -11,19 +11,26 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
-*/
-package edu.uci.ics.asterix.feed.mgmt;
+ */
+package edu.uci.ics.asterix.external.feed.lifecycle;
import java.io.Serializable;
+/**
+ * A unique identifier for a feed (dataset).
+ */
public class FeedId implements Serializable {
+ private static final long serialVersionUID = 1L;
+
private final String dataverse;
private final String dataset;
+ private final int hashcode;
public FeedId(String dataverse, String dataset) {
this.dataset = dataset;
this.dataverse = dataverse;
+ this.hashcode = (dataverse + "." + dataset).hashCode();
}
public String getDataverse() {
@@ -47,7 +54,12 @@
@Override
public int hashCode() {
- return dataverse.hashCode() + dataset.hashCode();
+ return hashcode;
+ }
+
+ @Override
+ public String toString() {
+ return dataverse + "." + dataset;
}
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java
new file mode 100644
index 0000000..29e4486
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedManager.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.feed.lifecycle;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+/**
+ * Handles (de-)registration of feeds for the delivery of control messages.
+ */
+public class FeedManager implements IFeedManager {
+
+ public static FeedManager INSTANCE = new FeedManager();
+
+ private FeedManager() {
+
+ }
+
+ private Map<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>> outGoingMsgQueueMap = new HashMap<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>>();
+
+ @Override
+ public void deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws AsterixException {
+ Set<LinkedBlockingQueue<IFeedMessage>> operatorQueues = outGoingMsgQueueMap.get(feedId);
+ try {
+ if (operatorQueues != null) {
+ for (LinkedBlockingQueue<IFeedMessage> queue : operatorQueues) {
+ queue.put(feedMessage);
+ }
+ } else {
+ throw new AsterixException("Unable to deliver message. Unknown feed: " + feedId);
+ }
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ @Override
+ public void registerFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
+ Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
+ if (feedQueues == null) {
+ feedQueues = new HashSet<LinkedBlockingQueue<IFeedMessage>>();
+ }
+ feedQueues.add(queue);
+ outGoingMsgQueueMap.put(feedId, feedQueues);
+ }
+
+ @Override
+ public void unregisterFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
+ Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
+ if (feedQueues == null || !feedQueues.contains(queue)) {
+ throw new IllegalArgumentException("Unable to de-register feed message queue. Unknown feedId: " + feedId);
+ }
+ feedQueues.remove(queue);
+ }
+
+}
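
The new lifecycle package now carries everything needed to push a control message to a running feed. Below is a usage sketch, not part of the patch, that exercises the API introduced above; the FeedId values and the "interval" property are hypothetical examples, while the classes and method signatures are the ones added in this change.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;

import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.external.feed.lifecycle.AlterFeedMessage;
import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
import edu.uci.ics.asterix.external.feed.lifecycle.FeedManager;
import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;

public class FeedMessagingExample {
    public static void main(String[] args) throws AsterixException, InterruptedException {
        FeedId feedId = new FeedId("feeds", "Tweets");
        LinkedBlockingQueue<IFeedMessage> inbox = new LinkedBlockingQueue<IFeedMessage>();

        // Done by the feed's runtime when it starts.
        FeedManager.INSTANCE.registerFeedMsgQueue(feedId, inbox);

        // Done by the control path: dispatch an ALTER message with new parameters.
        Map<String, String> altered = new HashMap<String, String>();
        altered.put("interval", "30"); // example key, not defined by the patch
        FeedManager.INSTANCE.deliverMessage(feedId, new AlterFeedMessage(altered));

        // The runtime drains its queue and reacts to the message type.
        IFeedMessage msg = inbox.take();
        System.out.println("received " + msg.getMessageType() + " for " + feedId);

        // Done by the runtime before it finishes.
        FeedManager.INSTANCE.unregisterFeedMsgQueue(feedId, inbox);
    }
}

Because the registered queues are unbounded LinkedBlockingQueues, deliverMessage does not block; undelivered messages simply wait until the runtime drains its queue.
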
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/FeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java
similarity index 60%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/FeedMessage.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java
index 1f1a020..af84d4f 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/FeedMessage.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/FeedMessage.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,37 +12,28 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.feed.comm;
+package edu.uci.ics.asterix.external.feed.lifecycle;
-public class FeedMessage implements IFeedMessage {
+/**
+ * A control message that can be sent to the runtime instance of a
+ * feed's adapter.
+ */
+public class FeedMessage implements IFeedMessage {
- protected MessageResponseMode messageResponseMode = MessageResponseMode.SYNCHRONOUS;
+ private static final long serialVersionUID = 1L;
+
protected MessageType messageType;
-
- public FeedMessage(MessageType messageType){
+ public FeedMessage(MessageType messageType) {
this.messageType = messageType;
}
-
- public MessageResponseMode getMessageResponseMode() {
- return messageResponseMode;
- }
-
-
- public void setMessageResponseMode(MessageResponseMode messageResponseMode) {
- this.messageResponseMode = messageResponseMode;
- }
-
-
public MessageType getMessageType() {
return messageType;
}
-
public void setMessageType(MessageType messageType) {
this.messageType = messageType;
}
-
-
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java
new file mode 100644
index 0000000..587d5a7
--- /dev/null
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedManager.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.external.feed.lifecycle;
+
+import java.util.concurrent.LinkedBlockingQueue;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+
+/**
+ * Handles (de-)registration of feeds for the delivery of control messages.
+ */
+public interface IFeedManager {
+
+ /**
+ * Register an input message queue for a feed specified by feedId.
+ * All messages sent to a feed are directed to the registered queue(s).
+ *
+ * @param feedId
+ * an identifier for the feed dataset.
+ * @param queue
+ * an input message queue for receiving control messages.
+ */
+ public void registerFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
+
+ /**
+ * Unregister an input message queue for a feed specified by feedId.
+ * A feed should unregister all previously registered queue(s) before finishing,
+ * as it is no longer active and need not process any further control messages.
+ *
+ * @param feedId
+ * an identifier for the feed dataset.
+ * @param queue
+ * an input message queue for receiving control messages.
+ */
+ public void unregisterFeedMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
+
+ /**
+ * Deliver a message to a feed with a given feedId.
+ *
+ * @param feedId
+ * identifier for the feed dataset.
+ * @param feedMessage
+ * control message that needs to be delivered.
+ * @throws AsterixException if the message could not be delivered to the feed.
+ */
+ public void deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws AsterixException;
+}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java
similarity index 72%
rename from asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java
rename to asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java
index dfb4f91..9e1e907 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/comm/IFeedMessage.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/feed/lifecycle/IFeedMessage.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -12,26 +12,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.asterix.feed.comm;
+package edu.uci.ics.asterix.external.feed.lifecycle;
import java.io.Serializable;
public interface IFeedMessage extends Serializable {
- public enum MessageResponseMode {
- SYNCHRONOUS,
- ASYNCHRONOUS,
- }
-
public enum MessageType {
STOP,
- SUSPEND,
- RESUME,
ALTER,
}
- public MessageResponseMode getMessageResponseMode();
-
public MessageType getMessageType();
-
+
}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IFeedClient.java
deleted file mode 100644
index f0e34c3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IFeedClient.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package edu.uci.ics.asterix.feed.intake;
-
-public interface IFeedClient {
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IPullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IPullBasedFeedClient.java
deleted file mode 100644
index f6b37b3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/IPullBasedFeedClient.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package edu.uci.ics.asterix.feed.intake;
-
-import java.io.DataOutput;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public interface IPullBasedFeedClient {
-
- public enum status {
- MORE_DATA,
- END_OF_DATA
- }
-
- public boolean nextTuple(DataOutput dataOutput) throws AsterixException;
-
- public void resetOnFailure(Exception e) throws AsterixException;
-
- public void suspend() throws Exception;
-
- public void resume() throws Exception;
-
- public void stop() throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedFeedClient.java
deleted file mode 100644
index 501acd4..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/intake/PullBasedFeedClient.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.intake;
-
-import java.io.DataOutput;
-import java.io.IOException;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
-import edu.uci.ics.asterix.om.base.AMutableRecord;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public abstract class PullBasedFeedClient implements IPullBasedFeedClient {
-
- protected ARecordSerializerDeserializer recordSerDe;
- protected AMutableRecord mutableRecord;
- protected boolean messageReceived;
-
- public abstract boolean setNextRecord() throws Exception;
-
- @Override
- public boolean nextTuple(DataOutput dataOutput) throws AsterixException {
- try {
- boolean newData = setNextRecord();
- if (newData) {
- IAType t = mutableRecord.getType();
- ATypeTag tag = t.getTypeTag();
- try {
- dataOutput.writeByte(tag.serialize());
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
- recordSerDe.serialize(mutableRecord, dataOutput);
- return true;
- }
- return false;
- } catch (Exception e) {
- throw new AsterixException(e);
- }
-
- }
-
- /*
- * public void displayFeedRecord() { StringBuilder builder = new
- * StringBuilder(); int numFields = recordType.getFieldNames().length; for
- * (int i = 0; i < numFields; i++) {
- * builder.append(mutableRecord.getValueByPos(i).toString());
- * builder.append("|"); } }
- */
-
- @Override
- public void suspend() throws Exception {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void resume() throws Exception {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void stop() throws Exception {
- // TODO Auto-generated method stub
-
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java
index 7cf1fb1..6f993ae 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IManagedFeedAdapter.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2009-2011 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
@@ -14,24 +14,30 @@
*/
package edu.uci.ics.asterix.feed.managed.adapter;
+import java.util.Map;
+
+/**
+ * Interface implemented by an adapter that can be controlled or managed by
+ * external commands (stop, alter).
+ */
public interface IManagedFeedAdapter {
- public enum OperationState {
- SUSPENDED,
- // INACTIVE state signifies that the feed dataset is not
- // connected with the external world through the feed
- // adapter.
- ACTIVE,
- // ACTIVE state signifies that the feed dataset is connected to the
- // external world using an adapter that may put data into the dataset.
- STOPPED,
- INACTIVE
- }
+ /**
+ * Discontinue the ingestion of data and end the feed.
+ */
+ public void stop();
- public void suspend() throws Exception;
-
- public void resume() throws Exception;
-
- public void stop() throws Exception;
+ /**
+ * Modify the adapter configuration parameters. This method is invoked
+ * while the adapter is actively ingesting data for a feed.
+ *
+ * @param properties
+ * A Map containing the configuration parameters that need to be altered.
+ */
+ public void alter(Map<String, String> properties);
}
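
With suspend()/resume() and the checked exceptions gone, the trimmed-down contract reduces to stop() and alter(). The sketch below is not part of the patch; the class and field names are hypothetical, and the volatile qualifiers are an added precaution for cross-thread visibility rather than something the interface mandates. It only illustrates the record-the-request style that RSSFeedAdapter follows above.

import java.util.Map;

import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;

public class ManagedAdapterSketch implements IManagedFeedAdapter {

    private volatile boolean stopRequested = false;
    private volatile Map<String, String> configuration;

    @Override
    public void stop() {
        stopRequested = true; // polled by the ingestion loop between batches
    }

    @Override
    public void alter(Map<String, String> properties) {
        this.configuration = properties; // picked up before the next fetch
    }

    public boolean continueIngestion() {
        return !stopRequested;
    }
}
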
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java
deleted file mode 100644
index 290c7d6..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/managed/adapter/IMutableFeedAdapter.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.managed.adapter;
-
-import java.util.Map;
-
-public interface IMutableFeedAdapter extends IManagedFeedAdapter {
-
- public void alter(Map<String,String> properties) throws Exception;
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedManager.java
deleted file mode 100644
index 748455a..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedManager.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.mgmt;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-
-public class FeedManager implements IFeedManager {
-
- private Map<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>> outGoingMsgQueueMap = new HashMap<FeedId, Set<LinkedBlockingQueue<IFeedMessage>>>();
- private LinkedBlockingQueue<IFeedMessage> incomingMsgQueue = new LinkedBlockingQueue<IFeedMessage>();
-
- @Override
- public boolean deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws Exception {
- Set<LinkedBlockingQueue<IFeedMessage>> operatorQueues = outGoingMsgQueueMap.get(feedId);
- if (operatorQueues != null) {
- for (LinkedBlockingQueue<IFeedMessage> queue : operatorQueues) {
- queue.put(feedMessage);
- }
- }
- return true;
- }
-
- @Override
- public void registerFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
- Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
- if (feedQueues == null) {
- feedQueues = new HashSet<LinkedBlockingQueue<IFeedMessage>>();
- }
- feedQueues.add(queue);
- outGoingMsgQueueMap.put(feedId, feedQueues);
-
- }
-
- @Override
- public void unregisterFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue) {
- Set<LinkedBlockingQueue<IFeedMessage>> feedQueues = outGoingMsgQueueMap.get(feedId);
- if (feedQueues == null || !feedQueues.contains(queue)) {
- throw new IllegalArgumentException(" unable to de-register feed message queue");
- }
- feedQueues.remove(queue);
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedSystemProvider.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedSystemProvider.java
deleted file mode 100644
index 82fb67d..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/FeedSystemProvider.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.mgmt;
-
-
-/**
- * Provider for all the sub-systems (transaction/lock/log/recovery) managers.
- * Users of transaction sub-systems must obtain them from the provider.
- */
-public class FeedSystemProvider {
- private static final IFeedManager feedManager = new FeedManager();
-
- public static IFeedManager getFeedManager() {
- return feedManager;
- }
-}
\ No newline at end of file
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/IFeedManager.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/IFeedManager.java
deleted file mode 100644
index a8c1303..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/mgmt/IFeedManager.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.mgmt;
-
-import java.util.concurrent.LinkedBlockingQueue;
-
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-
-public interface IFeedManager {
-
- public void registerFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
-
- public void unregisterFeedOperatorMsgQueue(FeedId feedId, LinkedBlockingQueue<IFeedMessage> queue);
-
- public boolean deliverMessage(FeedId feedId, IFeedMessage feedMessage) throws Exception;
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorDescriptor.java
deleted file mode 100644
index c300b0d..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedIntakeOperatorDescriptor.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.operator;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory.FeedAdapterType;
-import edu.uci.ics.asterix.external.adapter.factory.IGenericFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.ITypedFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.dataset.adapter.IFeedDatasourceAdapter;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-public class FeedIntakeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
- private static final long serialVersionUID = 1L;
-
- private final String adapterFactoryClassName;
- private final Map<String, String> adapterConfiguration;
- private final IAType atype;
- private final FeedId feedId;
-
- private transient IFeedDatasetAdapterFactory datasourceAdapterFactory;
-
- public FeedIntakeOperatorDescriptor(JobSpecification spec, FeedId feedId, String adapter,
- Map<String, String> arguments, ARecordType atype, RecordDescriptor rDesc) {
- super(spec, 1, 1);
- recordDescriptors[0] = rDesc;
- this.adapterFactoryClassName = adapter;
- this.adapterConfiguration = arguments;
- this.atype = atype;
- this.feedId = feedId;
- }
-
- public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
- IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
- throws HyracksDataException {
- IFeedDatasourceAdapter adapter;
- try {
- datasourceAdapterFactory = (IFeedDatasetAdapterFactory) Class.forName(adapterFactoryClassName)
- .newInstance();
- if (datasourceAdapterFactory.getFeedAdapterType().equals(FeedAdapterType.GENERIC)) {
- adapter = (IFeedDatasourceAdapter) ((IGenericFeedDatasetAdapterFactory) datasourceAdapterFactory)
- .createAdapter(adapterConfiguration, atype);
- } else {
- adapter = (IFeedDatasourceAdapter) ((ITypedFeedDatasetAdapterFactory) datasourceAdapterFactory)
- .createAdapter(adapterConfiguration);
- }
- adapter.initialize(ctx);
- } catch (Exception e) {
- throw new HyracksDataException("initialization of adapter failed", e);
- }
- return new FeedIntakeOperatorNodePushable(feedId, adapter, partition);
- }
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorDescriptor.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorDescriptor.java
deleted file mode 100644
index c66c49e..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorDescriptor.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.operator;
-
-import java.util.List;
-
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-public class FeedMessageOperatorDescriptor extends
- AbstractSingleActivityOperatorDescriptor {
-
- private final FeedId feedId;
- private final List<IFeedMessage> feedMessages;
- private final boolean sendToAll = true;
-
- public FeedMessageOperatorDescriptor(JobSpecification spec,
- String dataverse, String dataset, List<IFeedMessage> feedMessages) {
- super(spec, 0, 1);
- this.feedId = new FeedId(dataverse, dataset);
- this.feedMessages = feedMessages;
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
- IRecordDescriptorProvider recordDescProvider, int partition,
- int nPartitions) throws HyracksDataException {
- return new FeedMessageOperatorNodePushable(ctx, feedId, feedMessages,
- sendToAll, partition, nPartitions);
- }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorNodePushable.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorNodePushable.java
deleted file mode 100644
index 37e3ba3..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/feed/operator/FeedMessageOperatorNodePushable.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.feed.operator;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.feed.mgmt.FeedSystemProvider;
-import edu.uci.ics.asterix.feed.mgmt.IFeedManager;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-
-public class FeedMessageOperatorNodePushable extends
- AbstractUnaryOutputSourceOperatorNodePushable {
-
- private final FeedId feedId;
- private final List<IFeedMessage> feedMessages;
- private IFeedManager feedManager;
-
- public FeedMessageOperatorNodePushable(IHyracksTaskContext ctx,
- FeedId feedId, List<IFeedMessage> feedMessages, boolean applyToAll,
- int partition, int nPartitions) {
- this.feedId = feedId;
- if (applyToAll) {
- this.feedMessages = feedMessages;
- } else {
- this.feedMessages = new ArrayList<IFeedMessage>();
- feedMessages.add(feedMessages.get(partition));
- }
- feedManager = (IFeedManager) FeedSystemProvider.getFeedManager();
- }
-
- @Override
- public void initialize() throws HyracksDataException {
- try {
- writer.open();
- for (IFeedMessage feedMessage : feedMessages) {
- feedManager.deliverMessage(feedId, feedMessage);
- }
- } catch (Exception e) {
- throw new HyracksDataException(e);
- } finally {
- writer.close();
- }
- }
-
-}
diff --git a/asterix-metadata/pom.xml b/asterix-metadata/pom.xml
index 67f16ff..459d4d0 100644
--- a/asterix-metadata/pom.xml
+++ b/asterix-metadata/pom.xml
@@ -5,10 +5,7 @@
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-metadata</artifactId>
- <version>0.0.4-SNAPSHOT</version>
-
<build>
<plugins>
<plugin>
@@ -45,12 +42,10 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-invertedindex</artifactId>
- <version>0.2.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-rtree</artifactId>
- <version>0.2.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
index ec46e7b..1db4886 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -137,12 +137,18 @@
private static final ARecordType createFeedDetailsRecordType() {
AOrderedListType orderedListType = new AOrderedListType(BuiltinType.ASTRING, null);
- AOrderedListType orderedListOfPropertiesType = new AOrderedListType(DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE, null);
+ AOrderedListType orderedListOfPropertiesType = new AOrderedListType(DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE,
+ null);
String[] fieldNames = { "FileStructure", "PartitioningStrategy", "PartitioningKey", "PrimaryKey", "GroupName",
"DatasourceAdapter", "Properties", "Function", "Status" };
+ List<IAType> feedFunctionUnionList = new ArrayList<IAType>();
+ feedFunctionUnionList.add(BuiltinType.ANULL);
+ feedFunctionUnionList.add(BuiltinType.ASTRING);
+ AUnionType feedFunctionUnion = new AUnionType(feedFunctionUnionList, null);
+
IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, orderedListType, orderedListType,
- BuiltinType.ASTRING, BuiltinType.ASTRING, orderedListOfPropertiesType, BuiltinType.ASTRING,
+ BuiltinType.ASTRING, BuiltinType.ASTRING, orderedListOfPropertiesType, feedFunctionUnion,
BuiltinType.ASTRING };
return new ARecordType(null, fieldNames, fieldTypes, true);
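
The Function field above becomes optional by modeling it as a union of ANULL and ASTRING. A small sketch of that idiom, not part of the patch (the helper class and method names are hypothetical; the AUnionType and ARecordType constructor forms are the ones used in this hunk, assuming the usual edu.uci.ics.asterix.om.types imports):

import java.util.ArrayList;
import java.util.List;

import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.AUnionType;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;

public class OptionalFieldSketch {

    public static AUnionType nullableString() {
        List<IAType> union = new ArrayList<IAType>();
        union.add(BuiltinType.ANULL);   // absent value
        union.add(BuiltinType.ASTRING); // present value
        return new AUnionType(union, null);
    }

    public static ARecordType recordWithOptionalFunction() {
        String[] fieldNames = { "Function" };
        IAType[] fieldTypes = { nullableString() };
        return new ARecordType(null, fieldNames, fieldTypes, true); // same constructor form as the hunk above
    }
}
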
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
index fad1731..d92c76d 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
@@ -27,18 +27,16 @@
import edu.uci.ics.asterix.common.dataflow.IAsterixApplicationContextInfo;
import edu.uci.ics.asterix.common.parse.IParseFileSplitsDecl;
import edu.uci.ics.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IExternalDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.IFeedDatasetAdapterFactory.FeedAdapterType;
-import edu.uci.ics.asterix.external.adapter.factory.IGenericFeedDatasetAdapterFactory;
-import edu.uci.ics.asterix.external.adapter.factory.ITypedFeedDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.adapter.factory.ITypedDatasetAdapterFactory;
import edu.uci.ics.asterix.external.data.operator.ExternalDataScanOperatorDescriptor;
+import edu.uci.ics.asterix.external.data.operator.FeedIntakeOperatorDescriptor;
+import edu.uci.ics.asterix.external.data.operator.FeedMessageOperatorDescriptor;
import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
-import edu.uci.ics.asterix.external.dataset.adapter.IFeedDatasourceAdapter;
-import edu.uci.ics.asterix.feed.comm.IFeedMessage;
-import edu.uci.ics.asterix.feed.mgmt.FeedId;
-import edu.uci.ics.asterix.feed.operator.FeedIntakeOperatorDescriptor;
-import edu.uci.ics.asterix.feed.operator.FeedMessageOperatorDescriptor;
+import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
+import edu.uci.ics.asterix.external.feed.lifecycle.FeedId;
+import edu.uci.ics.asterix.external.feed.lifecycle.IFeedMessage;
import edu.uci.ics.asterix.formats.base.IDataFormat;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlTypeTraitProvider;
@@ -148,9 +146,9 @@
this.defaultDataverse = defaultDataverse;
this.stores = AsterixProperties.INSTANCE.getStores();
}
-
- public void setJobTxnId(long txnId){
- this.jobTxnId = txnId;
+
+ public void setJobTxnId(long txnId) {
+ this.jobTxnId = txnId;
}
public Dataverse getDefaultDataverse() {
@@ -272,7 +270,7 @@
throw new AlgebricksException("Can only scan datasets of records.");
}
- IExternalDatasetAdapterFactory adapterFactory;
+ IGenericDatasetAdapterFactory adapterFactory;
IDatasourceAdapter adapter;
String adapterName;
DatasourceAdapter adapterEntity;
@@ -283,22 +281,22 @@
adapterName);
if (adapterEntity != null) {
adapterFactoryClassname = adapterEntity.getClassname();
- adapterFactory = (IExternalDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+ adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
} else {
adapterFactoryClassname = adapterFactoryMapping.get(adapterName);
if (adapterFactoryClassname == null) {
throw new AlgebricksException(" Unknown adapter :" + adapterName);
}
- adapterFactory = (IExternalDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
+ adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
}
- adapter = ((IExternalDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails.getProperties(),
+ adapter = ((IGenericDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails.getProperties(),
itemType);
} catch (AlgebricksException ae) {
throw ae;
} catch (Exception e) {
e.printStackTrace();
- throw new AlgebricksException("unable to load the adapter factory class " + e);
+ throw new AlgebricksException("Unable to create adapter " + e);
}
if (!(adapter.getAdapterType().equals(IDatasourceAdapter.AdapterType.READ) || adapter.getAdapterType().equals(
@@ -313,7 +311,13 @@
ExternalDataScanOperatorDescriptor dataScanner = new ExternalDataScanOperatorDescriptor(jobSpec,
adapterFactoryClassname, datasetDetails.getProperties(), rt, scannerDesc);
- AlgebricksPartitionConstraint constraint = adapter.getPartitionConstraint();
+ AlgebricksPartitionConstraint constraint;
+ try {
+ constraint = adapter.getPartitionConstraint();
+ } catch (Exception e) {
+ throw new AlgebricksException(e);
+ }
+
return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(dataScanner, constraint);
}
@@ -346,25 +350,45 @@
FeedDatasetDetails datasetDetails = (FeedDatasetDetails) dataset.getDatasetDetails();
DatasourceAdapter adapterEntity;
IDatasourceAdapter adapter;
- IFeedDatasetAdapterFactory adapterFactory;
+ IAdapterFactory adapterFactory;
IAType adapterOutputType;
+ String adapterName;
+ String adapterFactoryClassname;
try {
- adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, null, datasetDetails.getAdapterFactory());
- adapterFactory = (IFeedDatasetAdapterFactory) Class.forName(adapterEntity.getClassname()).newInstance();
- if (adapterFactory.getFeedAdapterType().equals(FeedAdapterType.TYPED)) {
- adapter = ((ITypedFeedDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails
- .getProperties());
- adapterOutputType = ((IFeedDatasourceAdapter) adapter).getAdapterOutputType();
+ adapterName = datasetDetails.getAdapterFactory();
+ adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, MetadataConstants.METADATA_DATAVERSE_NAME,
+ adapterName);
+ if (adapterEntity != null) {
+ adapterFactoryClassname = adapterEntity.getClassname();
+ adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
} else {
- String outputTypeName = datasetDetails.getProperties().get(
- IGenericFeedDatasetAdapterFactory.KEY_TYPE_NAME);
- adapterOutputType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, null, outputTypeName).getDatatype();
- adapter = ((IGenericFeedDatasetAdapterFactory) adapterFactory).createAdapter(
- datasetDetails.getProperties(), adapterOutputType);
+ adapterFactoryClassname = adapterFactoryMapping.get(adapterName);
+ if (adapterFactoryClassname == null) {
+ // adapterName has been provided as a fully qualified classname
+ adapterFactoryClassname = adapterName;
+ }
+ adapterFactory = (IAdapterFactory) Class.forName(adapterFactoryClassname).newInstance();
}
+
+ if (adapterFactory instanceof ITypedDatasetAdapterFactory) {
+ adapter = ((ITypedDatasetAdapterFactory) adapterFactory).createAdapter(datasetDetails.getProperties());
+ adapterOutputType = ((ITypedDatasourceAdapter) adapter).getAdapterOutputType();
+ } else if (adapterFactory instanceof IGenericDatasetAdapterFactory) {
+ String outputTypeName = datasetDetails.getProperties().get(IGenericDatasetAdapterFactory.KEY_TYPE_NAME);
+ adapterOutputType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataset.getDataverseName(),
+ outputTypeName).getDatatype();
+ adapter = ((IGenericDatasetAdapterFactory) adapterFactory).createAdapter(
+ datasetDetails.getProperties(), adapterOutputType);
+ } else {
+ throw new IllegalStateException(" Unknown factory type for " + adapterFactoryClassname);
+ }
+ } catch (AlgebricksException ae) {
+ throw ae;
} catch (Exception e) {
- throw new AlgebricksException(e);
+ e.printStackTrace();
+ throw new AlgebricksException("unable to create adapter " + e);
}
ISerializerDeserializer payloadSerde = NonTaggedDataFormat.INSTANCE.getSerdeProvider()
@@ -372,11 +396,16 @@
RecordDescriptor feedDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
FeedIntakeOperatorDescriptor feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, new FeedId(
- dataset.getDataverseName(), dataset.getDatasetName()), adapterEntity.getClassname(),
+ dataset.getDataverseName(), dataset.getDatasetName()), adapterFactoryClassname,
datasetDetails.getProperties(), (ARecordType) adapterOutputType, feedDesc);
- return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedIngestor,
- adapter.getPartitionConstraint());
+ AlgebricksPartitionConstraint constraint = null;
+ try {
+ constraint = adapter.getPartitionConstraint();
+ } catch (Exception e) {
+ throw new AlgebricksException(e);
+ }
+ return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedIngestor, constraint);
}
public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildFeedMessengerRuntime(
@@ -820,10 +849,8 @@
TreeIndexInsertUpdateDeleteOperatorDescriptor btreeInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, recordDesc, appContext.getStorageManagerInterface(), appContext.getIndexRegistryProvider(),
splitsAndConstraint.first, typeTraits, comparatorFactories, fieldPermutation, indexOp,
- new BTreeDataflowHelperFactory(), filterFactory, NoOpOperationCallbackProvider.INSTANCE,
- jobTxnId);
- return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeInsert,
- splitsAndConstraint.second);
+ new BTreeDataflowHelperFactory(), filterFactory, NoOpOperationCallbackProvider.INSTANCE, jobTxnId);
+ return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeInsert, splitsAndConstraint.second);
} catch (MetadataException e) {
throw new AlgebricksException(e);
}
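
The feed-intake path above resolves the adapter factory in three steps: a registered adapter entity wins, then the built-in adapterFactoryMapping, and otherwise the supplied name is treated as a fully qualified factory class; the resulting factory is then dispatched on its typed versus generic flavor. The simplified sketch below is not part of the patch; the lookup maps stand in for the MetadataManager calls, and the class name is hypothetical, while the factory interfaces are the ones imported above.

import java.util.Map;

import edu.uci.ics.asterix.external.adapter.factory.IAdapterFactory;
import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
import edu.uci.ics.asterix.external.adapter.factory.ITypedDatasetAdapterFactory;

public class AdapterFactoryResolverSketch {

    public static IAdapterFactory resolve(String adapterName, Map<String, String> registeredAdapters,
            Map<String, String> builtinFactoryMapping) throws Exception {
        String classname = registeredAdapters.get(adapterName);
        if (classname == null) {
            classname = builtinFactoryMapping.get(adapterName);
        }
        if (classname == null) {
            // adapterName has been provided as a fully qualified classname
            classname = adapterName;
        }
        IAdapterFactory factory = (IAdapterFactory) Class.forName(classname).newInstance();
        if (!(factory instanceof ITypedDatasetAdapterFactory)
                && !(factory instanceof IGenericDatasetAdapterFactory)) {
            throw new IllegalStateException("Unknown factory type for " + classname);
        }
        return factory;
    }
}
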
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinArtifactMap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinArtifactMap.java
deleted file mode 100644
index cc48d1e..0000000
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/AsterixBuiltinArtifactMap.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.metadata.entities;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import edu.uci.ics.asterix.metadata.bootstrap.MetadataConstants;
-import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-
-public class AsterixBuiltinArtifactMap {
-
- public enum ARTIFACT_KIND {
- DATASET,
- DATAVERSE,
- FUNCTION,
- NODEGROUP
- }
-
- public static final String ARTIFACT_TYPE_DATASET = "DATASET";
- public static final String ARTIFACT_TYPE_DATAVERSE = "DATAVERSE";
- public static final String ARTIFACT_TYPE_FUNCTION = "FUNCTION";
- public static final String ARTIFACT_TYPE_NODEGROUP = "NODEGROUP";
-
- public static final String DATASET_DATASETS = "Dataset";
- public static final String DATASET_INDEX = "Index";
- public static final String DATASET_NODEGROUP = "NodeGroup";
-
- public static final String DATAVERSE_METADATA = "Metadata";
-
- public static final String NODEGROUP_DEFAULT = MetadataConstants.METADATA_DEFAULT_NODEGROUP_NAME;
-
- private static final Map<ARTIFACT_KIND, Set<String>> builtinArtifactMap = new HashMap<ARTIFACT_KIND, Set<String>>();
-
- static {
- Set<String> datasets = new HashSet<String>();
- datasets.add(DATASET_DATASETS);
- datasets.add(DATASET_INDEX);
- datasets.add(DATASET_NODEGROUP);
- builtinArtifactMap.put(ARTIFACT_KIND.DATASET, datasets);
-
- Set<String> dataverses = new HashSet<String>();
- dataverses.add(DATAVERSE_METADATA);
- builtinArtifactMap.put(ARTIFACT_KIND.DATAVERSE, dataverses);
-
- Set<String> nodeGroups = new HashSet<String>();
- nodeGroups.add(NODEGROUP_DEFAULT);
- builtinArtifactMap.put(ARTIFACT_KIND.NODEGROUP, nodeGroups);
-
- }
-
- public static boolean isSystemProtectedArtifact(ARTIFACT_KIND kind, Object artifactIdentifier) {
- switch (kind) {
- case NODEGROUP:
- case DATASET:
- case DATAVERSE:
- return builtinArtifactMap.get(kind).contains((String) artifactIdentifier);
-
- case FUNCTION:
- return AsterixBuiltinFunctions.isBuiltinCompilerFunction((FunctionIdentifier) artifactIdentifier);
- default:
- return false;
- }
- }
-}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
index 895466d..367066b 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entities/FeedDatasetDetails.java
@@ -34,6 +34,11 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+/**
+ * Provides functionality for writing parameters for a FEED dataset into the
+ * Metadata. Since FEED dataset is a special kind of INTERNAL dataset, this
+ * class extends InternalDatasetDetails.
+ */
public class FeedDatasetDetails extends InternalDatasetDetails {
private static final long serialVersionUID = 1L;
@@ -141,8 +146,8 @@
// write field 7
fieldValue.reset();
- if (getFunction() != null) {
- aString.setValue(getFunction().toString());
+ if (signature != null) {
+ aString.setValue(signature.toString());
stringSerde.serialize(aString, fieldValue.getDataOutput());
feedRecordBuilder.addField(MetadataRecordTypes.FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX, fieldValue);
}
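
FeedDatasetDetails now writes signature.toString() into the Function field only when a function is attached, and the DatasetTupleTranslator change further below parses the same dataverse.name@arity form back. The sketch below isolates that parsing step; it is not part of the patch, the helper class name and example string are hypothetical, and the FunctionSignature constructor is the (dataverse, name, arity) form used in the translator.

import edu.uci.ics.asterix.common.functions.FunctionSignature;

public final class FunctionFieldSketch {

    private FunctionFieldSketch() {
    }

    // Mirrors the parsing in the DatasetTupleTranslator hunk below:
    // "feeds.parseTweet@1" -> dataverse "feeds", name "parseTweet", arity 1.
    public static FunctionSignature parse(String stored, String datasetDataverse) {
        String[] qnameComponents = stored.split("\\.");
        String functionDataverse;
        String functionName;
        if (qnameComponents.length == 2) {
            functionDataverse = qnameComponents[0];
            functionName = qnameComponents[1];
        } else {
            // unqualified name: fall back to the dataset's dataverse
            functionDataverse = datasetDataverse;
            functionName = qnameComponents[0];
        }
        String[] nameComponents = functionName.split("@");
        return new FunctionSignature(functionDataverse, nameComponents[0], Integer.parseInt(nameComponents[1]));
    }
}
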
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index 324429e..3cc6f2f 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -27,8 +27,6 @@
import java.util.Map;
import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.common.config.DatasetConfig;
import edu.uci.ics.asterix.common.config.DatasetConfig.DatasetType;
import edu.uci.ics.asterix.common.functions.FunctionSignature;
import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
@@ -41,15 +39,14 @@
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails.FileStructure;
import edu.uci.ics.asterix.metadata.entities.InternalDatasetDetails.PartitioningStrategy;
+import edu.uci.ics.asterix.om.base.ANull;
import edu.uci.ics.asterix.om.base.AOrderedList;
import edu.uci.ics.asterix.om.base.ARecord;
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.base.IACursor;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
/**
* Translates a Dataset metadata entity to an ITupleReference and vice versa.
@@ -63,9 +60,6 @@
// Payload field containing serialized Dataset.
public static final int DATASET_PAYLOAD_TUPLE_FIELD_INDEX = 2;
- private FileSplit[] splits;
- private List<String> partitioningKey;
- private List<String> primaryKey;
@SuppressWarnings("unchecked")
private ISerializerDeserializer<ARecord> recordSerDes = AqlSerializerDeserializerProvider.INSTANCE
.getSerializerDeserializer(MetadataRecordTypes.DATASET_RECORDTYPE);
@@ -95,7 +89,71 @@
DatasetType datasetType = DatasetType.valueOf(((AString) datasetRecord.getValueByPos(3)).getStringValue());
IDatasetDetails datasetDetails = null;
switch (datasetType) {
- case FEED:
+ case FEED: {
+ ARecord datasetDetailsRecord = (ARecord) datasetRecord
+ .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_FEEDDETAILS_FIELD_INDEX);
+ FileStructure fileStructure = FileStructure.valueOf(((AString) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_FILESTRUCTURE_FIELD_INDEX))
+ .getStringValue());
+ PartitioningStrategy partitioningStrategy = PartitioningStrategy
+ .valueOf(((AString) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PARTITIONSTRATEGY_FIELD_INDEX))
+ .getStringValue());
+ IACursor cursor = ((AOrderedList) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PARTITIONKEY_FIELD_INDEX)).getCursor();
+ List<String> partitioningKey = new ArrayList<String>();
+ while (cursor.next())
+ partitioningKey.add(((AString) cursor.get()).getStringValue());
+ String groupName = ((AString) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX))
+ .getStringValue();
+ String adapter = ((AString) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX))
+ .getStringValue();
+ cursor = ((AOrderedList) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX)).getCursor();
+ Map<String, String> properties = new HashMap<String, String>();
+ String key;
+ String value;
+ while (cursor.next()) {
+ ARecord field = (ARecord) cursor.get();
+ key = ((AString) field
+ .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX))
+ .getStringValue();
+ value = ((AString) field
+ .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX))
+ .getStringValue();
+ properties.put(key, value);
+ }
+
+ Object o = datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX);
+ FunctionSignature signature = null;
+ if (!(o instanceof ANull)) {
+ String functionIdentifier = ((AString) o).getStringValue();
+ String[] qnameComponents = functionIdentifier.split("\\.");
+ String functionDataverse;
+ String functionName;
+ if (qnameComponents.length == 2) {
+ functionDataverse = qnameComponents[0];
+ functionName = qnameComponents[1];
+ } else {
+ functionDataverse = dataverseName;
+ functionName = qnameComponents[0];
+ }
+
+ String[] nameComponents = functionName.split("@");
+ signature = new FunctionSignature(functionDataverse, nameComponents[0],
+ Integer.parseInt(nameComponents[1]));
+ }
+
+ String feedState = ((AString) datasetDetailsRecord
+ .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_STATE_FIELD_INDEX)).getStringValue();
+
+ datasetDetails = new FeedDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
+ partitioningKey, groupName, adapter, properties, signature, feedState);
+ break;
+ }
case INTERNAL: {
ARecord datasetDetailsRecord = (ARecord) datasetRecord
.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_INTERNALDETAILS_FIELD_INDEX);
@@ -116,48 +174,9 @@
.getValueByPos(MetadataRecordTypes.INTERNAL_DETAILS_ARECORD_GROUPNAME_FIELD_INDEX))
.getStringValue();
- if (datasetType == DatasetConfig.DatasetType.INTERNAL) {
- datasetDetails = new InternalDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
- partitioningKey, groupName);
- } else {
- String adapter = ((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_DATASOURCE_ADAPTER_FIELD_INDEX))
- .getStringValue();
- cursor = ((AOrderedList) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_PROPERTIES_FIELD_INDEX))
- .getCursor();
- Map<String, String> properties = new HashMap<String, String>();
- String key;
- String value;
- while (cursor.next()) {
- ARecord field = (ARecord) cursor.get();
- key = ((AString) field
- .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX))
- .getStringValue();
- value = ((AString) field
- .getValueByPos(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX))
- .getStringValue();
- properties.put(key, value);
- }
+ datasetDetails = new InternalDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
+ partitioningKey, groupName);
- String functionIdentifier = ((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_FUNCTION_FIELD_INDEX))
- .getStringValue();
- String[] nameComponents1 = functionIdentifier.split(".");
- String functionDataverse = nameComponents1[0];
- String[] nameComponents2 = nameComponents1[1].split("@");
- String functionName = nameComponents2[0];
- FunctionSignature signature = new FunctionSignature(functionDataverse, functionName,
- Integer.parseInt(nameComponents2[1]));
-
- String feedState = ((AString) datasetDetailsRecord
- .getValueByPos(MetadataRecordTypes.FEED_DETAILS_ARECORD_STATE_FIELD_INDEX))
- .getStringValue();
-
- datasetDetails = new FeedDatasetDetails(fileStructure, partitioningStrategy, partitioningKey,
- partitioningKey, groupName, adapter, properties, signature, feedState);
-
- }
break;
}
@@ -263,23 +282,4 @@
}
- public void writePropertyTypeRecord(String name, String value, DataOutput out) throws IOException {
- IARecordBuilder propertyRecordBuilder = new RecordBuilder();
- ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
- propertyRecordBuilder.reset(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_RECORDTYPE);
-
- // write field 0
- fieldValue.reset();
- aString.setValue(name);
- stringSerde.serialize(aString, fieldValue.getDataOutput());
- propertyRecordBuilder.addField(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_NAME_FIELD_INDEX, fieldValue);
-
- // write field 1
- fieldValue.reset();
- aString.setValue(value);
- stringSerde.serialize(aString, fieldValue.getDataOutput());
- propertyRecordBuilder.addField(MetadataRecordTypes.DATASOURCE_ADAPTER_PROPERTIES_ARECORD_VALUE_FIELD_INDEX, fieldValue);
-
- propertyRecordBuilder.write(out, true);
- }
}
\ No newline at end of file
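The FEED branch above rebuilds a FunctionSignature from its stored "dataverse.name@arity" string; the removed code split on "." without escaping, which String.split treats as a regular expression matching any character, whereas the new code escapes the dot and falls back to the dataset's dataverse when the name is unqualified. Below is a minimal, illustrative sketch of that parsing: the sample string, class name, and main method are hypothetical, and the FunctionSignature call is left as a comment mirroring the constructor used in the diff.

    public class FunctionSignatureParseSketch {
        public static void main(String[] args) {
            String stored = "feeds.feed_processor@1";      // hypothetical value of the FUNCTION field
            String[] qname = stored.split("\\.");           // ["feeds", "feed_processor@1"]
            String dataverse = qname.length == 2 ? qname[0] : "defaultDataverse";
            String rest = qname.length == 2 ? qname[1] : qname[0];
            String[] nameAndArity = rest.split("@");         // ["feed_processor", "1"]
            // new FunctionSignature(dataverse, nameAndArity[0], Integer.parseInt(nameAndArity[1]));
            System.out.println(dataverse + " / " + nameAndArity[0] + " / " + nameAndArity[1]);
        }
    }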
diff --git a/asterix-om/pom.xml b/asterix-om/pom.xml
index a14f3c2..5368f07 100644
--- a/asterix-om/pom.xml
+++ b/asterix-om/pom.xml
@@ -5,10 +5,7 @@
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-om</artifactId>
- <version>0.0.4-SNAPSHOT</version>
-
<build>
<plugins>
<plugin>
@@ -33,7 +30,6 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-invertedindex</artifactId>
- <version>0.2.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
@@ -43,13 +39,11 @@
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-algebricks-compiler</artifactId>
- <version>0.2.2-SNAPSHOT</version>
+ <artifactId>algebricks-compiler</artifactId>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-rtree</artifactId>
- <version>0.2.2-SNAPSHOT</version>
- </dependency>
+ </dependency>
</dependencies>
</project>
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/MurmurHash3BinaryHashFunctionFamily.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/MurmurHash3BinaryHashFunctionFamily.java
new file mode 100644
index 0000000..83b165b
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/hash/MurmurHash3BinaryHashFunctionFamily.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.dataflow.data.nontagged.hash;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+
+/**
+ * An implementation of the Murmur3 hash family. The code is adapted from the
+ * original <a href=
+ * "http://code.google.com/p/guava-libraries/source/browse/guava/src/com/google/common/hash/Murmur3_32HashFunction.java">
+ * Murmur3_32HashFunction</a>
+ * in the Google Guava library.
+ */
+public class MurmurHash3BinaryHashFunctionFamily implements
+ IBinaryHashFunctionFamily {
+
+ public static final IBinaryHashFunctionFamily INSTANCE = new MurmurHash3BinaryHashFunctionFamily();
+
+ private static final long serialVersionUID = 1L;
+
+ private MurmurHash3BinaryHashFunctionFamily() {
+ }
+
+ private static final int C1 = 0xcc9e2d51;
+ private static final int C2 = 0x1b873593;
+ private static final int C3 = 5;
+ private static final int C4 = 0xe6546b64;
+ private static final int C5 = 0x85ebca6b;
+ private static final int C6 = 0xc2b2ae35;
+
+ @Override
+ public IBinaryHashFunction createBinaryHashFunction(final int seed) {
+ return new IBinaryHashFunction() {
+ @Override
+ public int hash(byte[] bytes, int offset, int length) {
+ int h = seed;
+ int p = offset;
+ int remain = length;
+ while (remain >= 4) {
+ int k = (bytes[p] & 0xff) | ((bytes[p + 1] & 0xff) << 8)
+ | ((bytes[p + 2] & 0xff) << 16)
+ | ((bytes[p + 3] & 0xff) << 24);
+ k *= C1;
+ k = Integer.rotateLeft(k, 15);
+ k *= C2;
+ h ^= k;
+ h = Integer.rotateLeft(h, 13);
+ h = h * C3 + C4;
+ p += 4;
+ remain -= 4;
+ }
+ if (remain > 0) {
+ int k = 0;
+ for (int i = 0; remain > 0; i += 8) {
+ k ^= (bytes[p++] & 0xff) << i;
+ remain--;
+ }
+ k *= C1;
+ k = Integer.rotateLeft(k, 15);
+ k *= C2;
+ h ^= k;
+ }
+ h ^= length;
+ h ^= (h >>> 16);
+ h *= C5;
+ h ^= (h >>> 13);
+ h *= C6;
+ h ^= (h >>> 16);
+ return h;
+ }
+ };
+ }
+}
\ No newline at end of file
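A usage sketch for the hash family added above: each seed yields a distinct, deterministic hash over a byte range, which is the contract IBinaryHashFunctionFamily provides (e.g. for multi-level hash partitioning). The sample key, seeds, and the standalone class/main method are illustrative only.

    import edu.uci.ics.asterix.dataflow.data.nontagged.hash.MurmurHash3BinaryHashFunctionFamily;
    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;

    public class MurmurHash3Sketch {
        public static void main(String[] args) {
            byte[] key = "hello asterix".getBytes();
            // Different seeds give independent hash functions from the same family.
            IBinaryHashFunction h0 = MurmurHash3BinaryHashFunctionFamily.INSTANCE.createBinaryHashFunction(0);
            IBinaryHashFunction h1 = MurmurHash3BinaryHashFunctionFamily.INSTANCE.createBinaryHashFunction(42);
            System.out.println(h0.hash(key, 0, key.length));
            System.out.println(h1.hash(key, 0, key.length));
        }
    }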
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32AscNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32AscNormalizedKeyComputerFactory.java
deleted file mode 100644
index 903ac3d..0000000
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32AscNormalizedKeyComputerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.normalizers.IntegerNormalizedKeyComputerFactory;
-
-public class AInt32AscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
-
- private static final long serialVersionUID = 1L;
-
- public static final AInt32AscNormalizedKeyComputerFactory INSTANCE = new AInt32AscNormalizedKeyComputerFactory();
-
- private IntegerNormalizedKeyComputerFactory inkcf = new IntegerNormalizedKeyComputerFactory();
-
- private AInt32AscNormalizedKeyComputerFactory() {
- }
-
- @Override
- public INormalizedKeyComputer createNormalizedKeyComputer() {
- final INormalizedKeyComputer intNkc = inkcf.createNormalizedKeyComputer();
- return new INormalizedKeyComputer() {
-
- @Override
- public int normalize(byte[] bytes, int start, int length) {
- return intNkc.normalize(bytes, start + 1, length - 1);
- }
- };
- }
-
-}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32DescNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32DescNormalizedKeyComputerFactory.java
deleted file mode 100644
index cec6a40..0000000
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AInt32DescNormalizedKeyComputerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.normalizers.IntegerNormalizedKeyComputerFactory;
-
-public class AInt32DescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
-
- private static final long serialVersionUID = 1L;
-
- public static final AInt32DescNormalizedKeyComputerFactory INSTANCE = new AInt32DescNormalizedKeyComputerFactory();
-
- private IntegerNormalizedKeyComputerFactory inkcf = new IntegerNormalizedKeyComputerFactory();
-
- private AInt32DescNormalizedKeyComputerFactory() {
- }
-
- @Override
- public INormalizedKeyComputer createNormalizedKeyComputer() {
- final INormalizedKeyComputer intNkc = inkcf.createNormalizedKeyComputer();
- return new INormalizedKeyComputer() {
-
- @Override
- public int normalize(byte[] bytes, int start, int length) {
- int nk = intNkc.normalize(bytes, start + 1, length - 1);
- return (int) ((long) 0xffffffff - (long) nk);
- }
- };
- }
-}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringAscNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringAscNormalizedKeyComputerFactory.java
deleted file mode 100644
index 0d1780f..0000000
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringAscNormalizedKeyComputerFactory.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.normalizers.UTF8StringNormalizedKeyComputerFactory;
-
-public class AStringAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
-
- private static final long serialVersionUID = 1L;
-
- public static final AStringAscNormalizedKeyComputerFactory INSTANCE = new AStringAscNormalizedKeyComputerFactory();
-
- private AStringAscNormalizedKeyComputerFactory() {
- }
-
- private UTF8StringNormalizedKeyComputerFactory strnkcf = new UTF8StringNormalizedKeyComputerFactory();
-
- @Override
- public INormalizedKeyComputer createNormalizedKeyComputer() {
- final INormalizedKeyComputer strNkc = strnkcf.createNormalizedKeyComputer();
- return new INormalizedKeyComputer() {
-
- @Override
- public int normalize(byte[] bytes, int start, int length) {
- return strNkc.normalize(bytes, start + 1, length - 1);
- }
- };
- }
-
-}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringDescNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringDescNormalizedKeyComputerFactory.java
deleted file mode 100644
index 0ff6acd..0000000
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AStringDescNormalizedKeyComputerFactory.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.normalizers.UTF8StringNormalizedKeyComputerFactory;
-
-public class AStringDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
-
- private static final long serialVersionUID = 1L;
-
- public static final AStringDescNormalizedKeyComputerFactory INSTANCE = new AStringDescNormalizedKeyComputerFactory();
-
- private AStringDescNormalizedKeyComputerFactory() {
- }
-
- private UTF8StringNormalizedKeyComputerFactory strnkcf = new UTF8StringNormalizedKeyComputerFactory();
-
- @Override
- public INormalizedKeyComputer createNormalizedKeyComputer() {
- final INormalizedKeyComputer strNkc = strnkcf.createNormalizedKeyComputer();
- return new INormalizedKeyComputer() {
-
- @Override
- public int normalize(byte[] bytes, int start, int length) {
- int nk = strNkc.normalize(bytes, start + 1, length - 1);
- return (int) ((long) 0xffffffff - (long) nk);
- }
- };
- }
-
-}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedAscNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedAscNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..2efa593
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedAscNormalizedKeyComputerFactory.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+/**
+ * This class uses the decorator pattern to wrap an ASC-ordered INormalizedKeyComputerFactory implementation,
+ * preserving the ASC order while skipping the leading ASTERIX type tag.
+ */
+public class AWrappedAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+ private static final long serialVersionUID = 1L;
+ private final INormalizedKeyComputerFactory nkcf;
+
+ public AWrappedAscNormalizedKeyComputerFactory(INormalizedKeyComputerFactory nkcf) {
+ this.nkcf = nkcf;
+ }
+
+ @Override
+ public INormalizedKeyComputer createNormalizedKeyComputer() {
+ final INormalizedKeyComputer nkc = nkcf.createNormalizedKeyComputer();
+ return new INormalizedKeyComputer() {
+
+ @Override
+ public int normalize(byte[] bytes, int start, int length) {
+ // start + 1 and length - 1 skip the type tag that always precedes the value in the ASTERIX data format
+ return nkc.normalize(bytes, start + 1, length - 1);
+ }
+ };
+ }
+
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedDescNormalizedKeyComputerFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedDescNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..a5653b9
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/keynormalizers/AWrappedDescNormalizedKeyComputerFactory.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+
+/**
+ * This class uses the decorator pattern to wrap an ASC-ordered INormalizedKeyComputerFactory implementation
+ * and invert the normalized key to obtain the DESC order.
+ */
+public class AWrappedDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+
+ private static final long serialVersionUID = 1L;
+ private final INormalizedKeyComputerFactory nkcf;
+
+ public AWrappedDescNormalizedKeyComputerFactory(INormalizedKeyComputerFactory nkcf) {
+ this.nkcf = nkcf;
+ }
+
+ @Override
+ public INormalizedKeyComputer createNormalizedKeyComputer() {
+ final INormalizedKeyComputer nkc = nkcf.createNormalizedKeyComputer();
+ return new INormalizedKeyComputer() {
+
+ @Override
+ public int normalize(byte[] bytes, int start, int length) {
+ int key = nkc.normalize(bytes, start + 1, length - 1);
+ return (int) ((long) 0xffffffff - (long) key);
+ }
+ };
+ }
+
+}
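The two wrapper factories above replace the deleted per-type factories (AInt32Asc/DescNormalizedKeyComputerFactory, AStringAsc/DescNormalizedKeyComputerFactory): any tag-less Hyracks INormalizedKeyComputerFactory can now be reused by wrapping it. A hedged sketch follows; the byte array (a hypothetical type-tag byte followed by a big-endian int) and the class/main method are illustrative, not part of the commit.

    import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AWrappedAscNormalizedKeyComputerFactory;
    import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AWrappedDescNormalizedKeyComputerFactory;
    import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
    import edu.uci.ics.hyracks.dataflow.common.data.normalizers.IntegerNormalizedKeyComputerFactory;

    public class WrappedKeyNormalizerSketch {
        public static void main(String[] args) {
            byte[] taggedInt = new byte[] { 4 /* hypothetical INT32 type tag */, 0, 0, 0, 7 };
            INormalizedKeyComputer asc = new AWrappedAscNormalizedKeyComputerFactory(
                    new IntegerNormalizedKeyComputerFactory()).createNormalizedKeyComputer();
            INormalizedKeyComputer desc = new AWrappedDescNormalizedKeyComputerFactory(
                    new IntegerNormalizedKeyComputerFactory()).createNormalizedKeyComputer();
            // Both wrappers skip the leading type tag; the DESC wrapper additionally inverts the key.
            System.out.println(asc.normalize(taggedInt, 0, taggedInt.length));
            System.out.println(desc.normalize(taggedInt, 0, taggedInt.length));
        }
    }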
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ABooleanPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ABooleanPrinter.java
index 60ee1fe..33866df 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ABooleanPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ABooleanPrinter.java
@@ -8,7 +8,6 @@
public class ABooleanPrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final ABooleanPrinter INSTANCE = new ABooleanPrinter();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ACirclePrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ACirclePrinter.java
index 812d93b..e2da96e 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ACirclePrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ACirclePrinter.java
@@ -8,7 +8,6 @@
public class ACirclePrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final ACirclePrinter INSTANCE = new ACirclePrinter();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADatePrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADatePrinter.java
index 24aa144..918e286 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADatePrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADatePrinter.java
@@ -9,11 +9,10 @@
public class ADatePrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
private static long CHRONON_OF_DAY = 24 * 60 * 60 * 1000;
public static final ADatePrinter INSTANCE = new ADatePrinter();
private static final GregorianCalendarSystem gCalInstance = GregorianCalendarSystem.getInstance();
-
+
@Override
public void init() {
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADateTimePrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADateTimePrinter.java
index d856a20..96df767 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADateTimePrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADateTimePrinter.java
@@ -9,7 +9,6 @@
public class ADateTimePrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final ADateTimePrinter INSTANCE = new ADateTimePrinter();
private static final GregorianCalendarSystem gCalInstance = GregorianCalendarSystem.getInstance();
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADoublePrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADoublePrinter.java
index 3a255c6..ae50d0e 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADoublePrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADoublePrinter.java
@@ -8,7 +8,6 @@
public class ADoublePrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final ADoublePrinter INSTANCE = new ADoublePrinter();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADurationPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADurationPrinter.java
index a59b9ac..061d372 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADurationPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ADurationPrinter.java
@@ -12,7 +12,6 @@
public class ADurationPrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final ADurationPrinter INSTANCE = new ADurationPrinter();
private static final GregorianCalendarSystem gCalInstance = GregorianCalendarSystem.getInstance();
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AFloatPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AFloatPrinter.java
index 27d2e2c..96c9ee9 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AFloatPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AFloatPrinter.java
@@ -8,7 +8,6 @@
public class AFloatPrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final AFloatPrinter INSTANCE = new AFloatPrinter();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt16Printer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt16Printer.java
index 8961013..1687fb1 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt16Printer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt16Printer.java
@@ -13,8 +13,6 @@
public class AInt16Printer implements IPrinter {
- private static final long serialVersionUID = 1L;
-
private static final String SUFFIX_STRING = "i16";
private static byte[] _suffix;
private static int _suffix_count;
@@ -23,6 +21,7 @@
DataOutput dout = new DataOutputStream(interm);
try {
dout.writeUTF(SUFFIX_STRING);
+ interm.close();
} catch (IOException e) {
e.printStackTrace();
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt32Printer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt32Printer.java
index a5c3245..b20db34 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt32Printer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt32Printer.java
@@ -10,7 +10,6 @@
public class AInt32Printer implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final AInt32Printer INSTANCE = new AInt32Printer();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt64Printer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt64Printer.java
index 318828e..312802a 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt64Printer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt64Printer.java
@@ -13,8 +13,6 @@
public class AInt64Printer implements IPrinter {
- private static final long serialVersionUID = 1L;
-
private static final String SUFFIX_STRING = "i64";
private static byte[] _suffix;
private static int _suffix_count;
@@ -23,6 +21,7 @@
DataOutput dout = new DataOutputStream(interm);
try {
dout.writeUTF(SUFFIX_STRING);
+ interm.close();
} catch (IOException e) {
e.printStackTrace();
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt8Printer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt8Printer.java
index bd4139d..a5a421d 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt8Printer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AInt8Printer.java
@@ -13,8 +13,6 @@
public class AInt8Printer implements IPrinter {
- private static final long serialVersionUID = 1L;
-
private static final String SUFFIX_STRING = "i8";
private static byte[] _suffix;
private static int _suffix_count;
@@ -23,6 +21,7 @@
DataOutput dout = new DataOutputStream(interm);
try {
dout.writeUTF(SUFFIX_STRING);
+ interm.close();
} catch (IOException e) {
e.printStackTrace();
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ALinePrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ALinePrinter.java
index f99727a..40f405c 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ALinePrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ALinePrinter.java
@@ -8,7 +8,6 @@
public class ALinePrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final ALinePrinter INSTANCE = new ALinePrinter();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ANullPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ANullPrinter.java
index 6abc0e1..daea967 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ANullPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ANullPrinter.java
@@ -5,11 +5,8 @@
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
-
-
public class ANullPrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final ANullPrinter INSTANCE = new ANullPrinter();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
index aff30f7..edcfc8a 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AObjectPrinter.java
@@ -10,7 +10,6 @@
public class AObjectPrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final AObjectPrinter INSTANCE = new AObjectPrinter();
private IPrinter recordPrinter = new ARecordPrinterFactory(null).createPrinter();
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AOrderedlistPrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AOrderedlistPrinterFactory.java
index 563b14c..7d04ebc 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AOrderedlistPrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AOrderedlistPrinterFactory.java
@@ -2,22 +2,21 @@
import java.io.PrintStream;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
-import edu.uci.ics.asterix.formats.nontagged.AqlPrinterFactoryProvider;
+import edu.uci.ics.asterix.om.pointables.PointableAllocator;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.printer.APrintVisitor;
import edu.uci.ics.asterix.om.types.AOrderedListType;
import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
public class AOrderedlistPrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
-
private AOrderedListType orderedlistType;
public AOrderedlistPrinterFactory(AOrderedListType orderedlistType) {
@@ -27,77 +26,29 @@
@Override
public IPrinter createPrinter() {
- return new IPrinter() {
+ PointableAllocator allocator = new PointableAllocator();
+ final IAType inputType = orderedlistType == null ? DefaultOpenFieldType
+ .getDefaultOpenFieldType(ATypeTag.ORDEREDLIST) : orderedlistType;
+ final IVisitablePointable listAccessor = allocator.allocateListValue(inputType);
+ final APrintVisitor printVisitor = new APrintVisitor();
+ final Pair<PrintStream, ATypeTag> arg = new Pair<PrintStream, ATypeTag>(null, null);
- private IPrinter itemPrinter;
- private IAType itemType;
- private ATypeTag itemTag;
- private boolean typedItemList = false;
+ return new IPrinter() {
@Override
public void init() throws AlgebricksException {
-
- if (orderedlistType != null && orderedlistType.getItemType() != null) {
- itemType = orderedlistType.getItemType();
- if (itemType.getTypeTag() == ATypeTag.ANY) {
- this.typedItemList = false;
- this.itemPrinter = AObjectPrinterFactory.INSTANCE.createPrinter();
- } else {
- this.typedItemList = true;
- itemPrinter = AqlPrinterFactoryProvider.INSTANCE.getPrinterFactory(itemType).createPrinter();
- itemTag = orderedlistType.getItemType().getTypeTag();
- }
- } else {
- this.typedItemList = false;
- this.itemPrinter = AObjectPrinterFactory.INSTANCE.createPrinter();
- }
- itemPrinter.init();
-
+ arg.second = inputType.getTypeTag();
}
@Override
- public void print(byte[] b, int s, int l, PrintStream ps) throws AlgebricksException {
- ps.print("[ ");
- int numberOfitems = AInt32SerializerDeserializer.getInt(b, s + 6);
- int itemOffset;
- if (typedItemList) {
- switch (itemTag) {
- case STRING:
- case RECORD:
- case ORDEREDLIST:
- case UNORDEREDLIST:
- case ANY:
- itemOffset = s + 10 + (numberOfitems * 4);
- break;
- default:
- itemOffset = s + 10;
- }
- } else
- itemOffset = s + 10 + (numberOfitems * 4);
- int itemLength = 0;
+ public void print(byte[] b, int start, int l, PrintStream ps) throws AlgebricksException {
try {
- if (typedItemList) {
- for (int i = 0; i < numberOfitems; i++) {
- itemLength = NonTaggedFormatUtil.getFieldValueLength(b, itemOffset, itemTag, false);
- itemPrinter.print(b, itemOffset - 1, itemLength, ps);
- itemOffset += itemLength;
- if (i + 1 < numberOfitems)
- ps.print(", ");
- }
- } else {
- for (int i = 0; i < numberOfitems; i++) {
- itemTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b[itemOffset]);
- itemLength = NonTaggedFormatUtil.getFieldValueLength(b, itemOffset, itemTag, true) + 1;
- itemPrinter.print(b, itemOffset, itemLength, ps);
- itemOffset += itemLength;
- if (i + 1 < numberOfitems)
- ps.print(", ");
- }
- }
- } catch (AsterixException e) {
- throw new AlgebricksException(e);
+ listAccessor.set(b, start, l);
+ arg.first = ps;
+ listAccessor.accept(printVisitor, arg);
+ } catch (Exception ioe) {
+ throw new AlgebricksException(ioe);
}
- ps.print(" ]");
}
};
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APoint3DPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APoint3DPrinter.java
index 699cb77..7394dc5 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APoint3DPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APoint3DPrinter.java
@@ -8,7 +8,6 @@
public class APoint3DPrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final APoint3DPrinter INSTANCE = new APoint3DPrinter();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APointPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APointPrinter.java
index 50fbb0b..7c50905 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APointPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APointPrinter.java
@@ -8,7 +8,6 @@
public class APointPrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final APointPrinter INSTANCE = new APointPrinter();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APolygonPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APolygonPrinter.java
index 794dae7..bc1e553 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APolygonPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/APolygonPrinter.java
@@ -9,7 +9,6 @@
public class APolygonPrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final APolygonPrinter INSTANCE = new APolygonPrinter();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARecordPrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARecordPrinterFactory.java
index fafe320..e4b2676 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARecordPrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARecordPrinterFactory.java
@@ -2,23 +2,21 @@
import java.io.PrintStream;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
-import edu.uci.ics.asterix.formats.nontagged.AqlPrinterFactoryProvider;
+import edu.uci.ics.asterix.om.pointables.PointableAllocator;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.printer.APrintVisitor;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
public class ARecordPrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
-
private final ARecordType recType;
public ARecordPrinterFactory(ARecordType recType) {
@@ -28,137 +26,29 @@
@Override
public IPrinter createPrinter() {
- return new IPrinter() {
+ PointableAllocator allocator = new PointableAllocator();
+ final IAType inputType = recType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.RECORD)
+ : recType;
+ final IVisitablePointable recAccessor = allocator.allocateRecordValue(inputType);
+ final APrintVisitor printVisitor = new APrintVisitor();
+ final Pair<PrintStream, ATypeTag> arg = new Pair<PrintStream, ATypeTag>(null, null);
- private IPrinter[] fieldPrinters;
- private IAType[] fieldTypes;
- private int[] fieldOffsets;
- private int numberOfSchemaFields, numberOfOpenFields, openPartOffset, fieldOffset, offsetArrayOffset,
- fieldValueLength, nullBitMapOffset, recordOffset;
- private boolean isExpanded, hasNullableFields;
- private ATypeTag tag;
+ return new IPrinter() {
@Override
public void init() throws AlgebricksException {
-
- numberOfSchemaFields = 0;
- if (recType != null) {
- numberOfSchemaFields = recType.getFieldNames().length;
- fieldPrinters = new IPrinter[numberOfSchemaFields];
- fieldTypes = new IAType[numberOfSchemaFields];
- fieldOffsets = new int[numberOfSchemaFields];
- for (int i = 0; i < numberOfSchemaFields; i++) {
- fieldTypes[i] = recType.getFieldTypes()[i];
- if (fieldTypes[i].getTypeTag() == ATypeTag.UNION
- && NonTaggedFormatUtil.isOptionalField((AUnionType) fieldTypes[i]))
- fieldPrinters[i] = (AqlPrinterFactoryProvider.INSTANCE
- .getPrinterFactory(((AUnionType) fieldTypes[i]).getUnionList().get(
- NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST))).createPrinter();
- else
- fieldPrinters[i] = (AqlPrinterFactoryProvider.INSTANCE.getPrinterFactory(fieldTypes[i]))
- .createPrinter();
- fieldPrinters[i].init();
- }
- }
+ arg.second = inputType.getTypeTag();
}
@Override
public void print(byte[] b, int start, int l, PrintStream ps) throws AlgebricksException {
- ps.print("{ ");
- isExpanded = false;
- openPartOffset = 0;
- int s = start;
- recordOffset = s;
- if (recType == null) {
- openPartOffset = s + AInt32SerializerDeserializer.getInt(b, s + 6);
- s += 8;
- isExpanded = true;
- } else {
- if (recType.isOpen()) {
- isExpanded = b[s + 5] == 1 ? true : false;
- if (isExpanded) {
- openPartOffset = s + AInt32SerializerDeserializer.getInt(b, s + 6);
- s += 10;
- } else
- s += 6;
- } else
- s += 5;
- }
try {
- if (numberOfSchemaFields > 0) {
- s += 4;
- nullBitMapOffset = 0;
- hasNullableFields = NonTaggedFormatUtil.hasNullableField(recType);
- if (hasNullableFields) {
- nullBitMapOffset = s;
- offsetArrayOffset = s
- + (this.numberOfSchemaFields % 8 == 0 ? numberOfSchemaFields / 8
- : numberOfSchemaFields / 8 + 1);
- } else {
- offsetArrayOffset = s;
- }
- for (int i = 0; i < numberOfSchemaFields; i++) {
- fieldOffsets[i] = AInt32SerializerDeserializer.getInt(b, offsetArrayOffset) + recordOffset;
- offsetArrayOffset += 4;
- }
- for (int fieldNumber = 0; fieldNumber < numberOfSchemaFields; fieldNumber++) {
- if (fieldNumber != 0) {
- ps.print(", ");
- }
- ps.print("\"");
- ps.print(recType.getFieldNames()[fieldNumber]);
- ps.print("\": ");
- if (hasNullableFields) {
- byte b1 = b[nullBitMapOffset + fieldNumber / 8];
- int p = 1 << (7 - (fieldNumber % 8));
- if ((b1 & p) == 0) {
- ps.print("null");
- continue;
- }
- }
- if (fieldTypes[fieldNumber].getTypeTag() == ATypeTag.UNION) {
- if (NonTaggedFormatUtil.isOptionalField((AUnionType) fieldTypes[fieldNumber])) {
- tag = ((AUnionType) fieldTypes[fieldNumber]).getUnionList()
- .get(NonTaggedFormatUtil.OPTIONAL_TYPE_INDEX_IN_UNION_LIST).getTypeTag();
- fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b,
- fieldOffsets[fieldNumber], tag, false);
- fieldPrinters[fieldNumber].print(b, fieldOffsets[fieldNumber] - 1,
- fieldValueLength, ps);
- }
- } else {
- tag = fieldTypes[fieldNumber].getTypeTag();
- fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b,
- fieldOffsets[fieldNumber], tag, false);
- fieldPrinters[fieldNumber]
- .print(b, fieldOffsets[fieldNumber] - 1, fieldValueLength, ps);
- }
- }
- if (isExpanded)
- ps.print(", ");
- }
- if (isExpanded) {
- numberOfOpenFields = AInt32SerializerDeserializer.getInt(b, openPartOffset);
- fieldOffset = openPartOffset + 4 + (8 * numberOfOpenFields);
- for (int i = 0; i < numberOfOpenFields; i++) {
- // we print the field name
- fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffset, ATypeTag.STRING,
- false);
- AStringPrinter.INSTANCE.print(b, fieldOffset - 1, fieldValueLength, ps);
- fieldOffset += fieldValueLength;
- ps.print(": ");
- // now we print the value
- tag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b[fieldOffset]);
- fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffset, tag, true) + 1;
- AObjectPrinter.INSTANCE.print(b, fieldOffset, fieldValueLength, ps);
- fieldOffset += fieldValueLength;
- if (i + 1 < numberOfOpenFields)
- ps.print(", ");
- }
- }
- } catch (AsterixException e) {
- throw new AlgebricksException(e);
+ recAccessor.set(b, start, l);
+ arg.first = ps;
+ recAccessor.accept(printVisitor, arg);
+ } catch (Exception ioe) {
+ throw new AlgebricksException(ioe);
}
- ps.print(" }");
}
};
}
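The rewritten printer factories (ordered list, record, and unordered list below) all follow the same pattern: allocate an IVisitablePointable for the declared type (or the default open type when the type is null), then hand the raw bytes and the target PrintStream to APrintVisitor instead of decoding the binary layout inline. A minimal caller-side sketch, assuming the serialized record bytes come from elsewhere; the class and method here are illustrative only.

    import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ARecordPrinterFactory;
    import edu.uci.ics.hyracks.algebricks.data.IPrinter;

    public class RecordPrinterSketch {
        public static void print(byte[] serializedRecord, int start, int len) throws Exception {
            // Passing null falls back to the default open record type, as in createPrinter() above.
            IPrinter printer = new ARecordPrinterFactory(null).createPrinter();
            printer.init();
            printer.print(serializedRecord, start, len, System.out);
        }
    }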
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java
index 17caa53..0618f28 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ARectanglePrinter.java
@@ -8,7 +8,6 @@
public class ARectanglePrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final ARectanglePrinter INSTANCE = new ARectanglePrinter();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AStringPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AStringPrinter.java
index 6f5b629..d491777 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AStringPrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AStringPrinter.java
@@ -9,7 +9,6 @@
public class AStringPrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final AStringPrinter INSTANCE = new AStringPrinter();
@Override
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ATimePrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ATimePrinter.java
index 72a6e37..b77ee64 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ATimePrinter.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/ATimePrinter.java
@@ -9,10 +9,9 @@
public class ATimePrinter implements IPrinter {
- private static final long serialVersionUID = 1L;
public static final ATimePrinter INSTANCE = new ATimePrinter();
private static final GregorianCalendarSystem gCalInstance = GregorianCalendarSystem.getInstance();
-
+
@Override
public void init() {
@@ -21,7 +20,7 @@
@Override
public void print(byte[] b, int s, int l, PrintStream ps) throws AlgebricksException {
int time = AInt32SerializerDeserializer.getInt(b, s + 1);
-
+
ps.print("time(\"");
ps.append(String.format("%02d", gCalInstance.getHourOfDay(time))).append(":")
.append(String.format("%02d", gCalInstance.getMinOfHour(time))).append(":")
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AUnorderedlistPrinterFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AUnorderedlistPrinterFactory.java
index 59af365..f779919 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AUnorderedlistPrinterFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/printers/AUnorderedlistPrinterFactory.java
@@ -2,22 +2,21 @@
import java.io.PrintStream;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
-import edu.uci.ics.asterix.formats.nontagged.AqlPrinterFactoryProvider;
+import edu.uci.ics.asterix.om.pointables.PointableAllocator;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.printer.APrintVisitor;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.AUnorderedListType;
-import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
import edu.uci.ics.hyracks.algebricks.data.IPrinter;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactory;
public class AUnorderedlistPrinterFactory implements IPrinterFactory {
private static final long serialVersionUID = 1L;
-
private AUnorderedListType unorderedlistType;
public AUnorderedlistPrinterFactory(AUnorderedListType unorderedlistType) {
@@ -27,77 +26,29 @@
@Override
public IPrinter createPrinter() {
- return new IPrinter() {
+ PointableAllocator allocator = new PointableAllocator();
+ final IAType inputType = unorderedlistType == null ? DefaultOpenFieldType
+ .getDefaultOpenFieldType(ATypeTag.UNORDEREDLIST) : unorderedlistType;
+ final IVisitablePointable listAccessor = allocator.allocateListValue(inputType);
+ final APrintVisitor printVisitor = new APrintVisitor();
+ final Pair<PrintStream, ATypeTag> arg = new Pair<PrintStream, ATypeTag>(null, null);
- private IPrinter itemPrinter;
- private IAType itemType;
- private ATypeTag itemTag;
- private boolean typedItemList = false;
+ return new IPrinter() {
@Override
public void init() throws AlgebricksException {
-
- if (unorderedlistType != null && unorderedlistType.getItemType() != null) {
- itemType = unorderedlistType.getItemType();
- if (itemType.getTypeTag() == ATypeTag.ANY) {
- this.typedItemList = false;
- this.itemPrinter = AObjectPrinterFactory.INSTANCE.createPrinter();
- } else {
- this.typedItemList = true;
- itemPrinter = AqlPrinterFactoryProvider.INSTANCE.getPrinterFactory(itemType).createPrinter();
- itemTag = unorderedlistType.getItemType().getTypeTag();
- }
- } else {
- this.typedItemList = false;
- this.itemPrinter = AObjectPrinterFactory.INSTANCE.createPrinter();
- }
- itemPrinter.init();
+ arg.second = inputType.getTypeTag();
}
@Override
- public void print(byte[] b, int s, int l, PrintStream ps) throws AlgebricksException {
- ps.print("{{ ");
- int numberOfitems = AInt32SerializerDeserializer.getInt(b, s + 6);
- int itemOffset;
- if (typedItemList) {
- switch (itemTag) {
- case STRING:
- case RECORD:
- case ORDEREDLIST:
- case UNORDEREDLIST:
- case ANY:
- itemOffset = s + 10 + (numberOfitems * 4);
- break;
- default:
- itemOffset = s + 10;
- }
- } else
- itemOffset = s + 10 + (numberOfitems * 4);
- int itemLength;
-
+ public void print(byte[] b, int start, int l, PrintStream ps) throws AlgebricksException {
try {
- if (typedItemList) {
- for (int i = 0; i < numberOfitems; i++) {
- itemLength = NonTaggedFormatUtil.getFieldValueLength(b, itemOffset, itemTag, false);
- itemPrinter.print(b, itemOffset - 1, itemLength, ps);
- itemOffset += itemLength;
- if (i + 1 < numberOfitems)
- ps.print(", ");
- }
- } else {
- for (int i = 0; i < numberOfitems; i++) {
- itemTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b[itemOffset]);
- itemLength = NonTaggedFormatUtil.getFieldValueLength(b, itemOffset, itemTag, true) + 1;
- itemPrinter.print(b, itemOffset, itemLength, ps);
- itemOffset += itemLength;
- if (i + 1 < numberOfitems)
- ps.print(", ");
- }
- }
- } catch (AsterixException e) {
- throw new AlgebricksException(e);
+ listAccessor.set(b, start, l);
+ arg.first = ps;
+ listAccessor.accept(printVisitor, arg);
+ } catch (Exception ioe) {
+ throw new AlgebricksException(ioe);
}
- ps.print(" }}");
}
};
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AOrderedListSerializerDeserializer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AOrderedListSerializerDeserializer.java
index 38262d3..4d115d7 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AOrderedListSerializerDeserializer.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/dataflow/data/nontagged/serde/AOrderedListSerializerDeserializer.java
@@ -25,9 +25,9 @@
public static final AOrderedListSerializerDeserializer SCHEMALESS_INSTANCE = new AOrderedListSerializerDeserializer();
private IAType itemType;
- @SuppressWarnings("unchecked")
+ @SuppressWarnings("rawtypes")
private ISerializerDeserializer serializer;
- @SuppressWarnings("unchecked")
+ @SuppressWarnings("rawtypes")
private ISerializerDeserializer deserializer;
private AOrderedListType orderedlistType;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java
index c1df096..813c3a0 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/base/IDataFormat.java
@@ -14,6 +14,7 @@
import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
@@ -63,4 +64,6 @@
public IExpressionEvalSizeComputer getExpressionEvalSizeComputer();
public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider();
+
+ public IBinaryHashFunctionFamilyProvider getBinaryHashFunctionFamilyProvider();
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
index e4db8da..09c99b8 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryComparatorFactoryProvider.java
@@ -19,8 +19,8 @@
import edu.uci.ics.hyracks.data.std.primitive.FloatPointable;
import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
import edu.uci.ics.hyracks.data.std.primitive.LongPointable;
+import edu.uci.ics.hyracks.data.std.primitive.RawUTF8StringPointable;
import edu.uci.ics.hyracks.data.std.primitive.ShortPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
public class AqlBinaryComparatorFactoryProvider implements IBinaryComparatorFactoryProvider, Serializable {
@@ -39,7 +39,7 @@
public static final PointableBinaryComparatorFactory DOUBLE_POINTABLE_INSTANCE = new PointableBinaryComparatorFactory(
DoublePointable.FACTORY);
public static final PointableBinaryComparatorFactory UTF8STRING_POINTABLE_INSTANCE = new PointableBinaryComparatorFactory(
- UTF8StringPointable.FACTORY);
+ RawUTF8StringPointable.FACTORY);
// Equivalent to UTF8STRING_POINTABLE_INSTANCE but all characters are considered lower case to implement case-insensitive comparisons.
public static final PointableBinaryComparatorFactory UTF8STRING_LOWERCASE_POINTABLE_INSTANCE = new PointableBinaryComparatorFactory(
UTF8StringLowercasePointable.FACTORY);
@@ -64,11 +64,11 @@
public IBinaryComparatorFactory getBinaryComparatorFactory(Object type, boolean ascending) {
if (type == null) {
return anyBinaryComparatorFactory(ascending);
- }
+ }
IAType aqlType = (IAType) type;
return getBinaryComparatorFactory(aqlType.getTypeTag(), ascending);
}
-
+
public IBinaryComparatorFactory getBinaryComparatorFactory(ATypeTag type, boolean ascending) {
switch (type) {
case ANY:
@@ -125,8 +125,7 @@
return addOffset(ADateTimeAscBinaryComparatorFactory.INSTANCE, ascending);
}
default: {
- throw new NotImplementedException("No binary comparator factory implemented for type "
- + type + " .");
+ throw new NotImplementedException("No binary comparator factory implemented for type " + type + " .");
}
}
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
index cab8ded..1afdbf8 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFactoryProvider.java
@@ -16,20 +16,25 @@
import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
import edu.uci.ics.hyracks.data.std.primitive.FloatPointable;
import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.RawUTF8StringPointable;
import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-
public class AqlBinaryHashFunctionFactoryProvider implements IBinaryHashFunctionFactoryProvider, Serializable {
private static final long serialVersionUID = 1L;
public static final AqlBinaryHashFunctionFactoryProvider INSTANCE = new AqlBinaryHashFunctionFactoryProvider();
- public static final PointableBinaryHashFunctionFactory INTEGER_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(IntegerPointable.FACTORY);
- public static final PointableBinaryHashFunctionFactory FLOAT_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(FloatPointable.FACTORY);
- public static final PointableBinaryHashFunctionFactory DOUBLE_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(DoublePointable.FACTORY);
- public static final PointableBinaryHashFunctionFactory UTF8STRING_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY);
+ public static final PointableBinaryHashFunctionFactory INTEGER_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(
+ IntegerPointable.FACTORY);
+ public static final PointableBinaryHashFunctionFactory FLOAT_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(
+ FloatPointable.FACTORY);
+ public static final PointableBinaryHashFunctionFactory DOUBLE_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(
+ DoublePointable.FACTORY);
+ public static final PointableBinaryHashFunctionFactory UTF8STRING_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(
+ RawUTF8StringPointable.FACTORY);
// Equivalent to UTF8STRING_POINTABLE_INSTANCE but all characters are considered lower case to implement case-insensitive hashing.
- public static final PointableBinaryHashFunctionFactory UTF8STRING_LOWERCASE_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(UTF8StringLowercasePointable.FACTORY);
-
+ public static final PointableBinaryHashFunctionFactory UTF8STRING_LOWERCASE_POINTABLE_INSTANCE = new PointableBinaryHashFunctionFactory(
+ UTF8StringLowercasePointable.FACTORY);
+
private AqlBinaryHashFunctionFactoryProvider() {
}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java
new file mode 100644
index 0000000..bc7ba26
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlBinaryHashFunctionFamilyProvider.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.formats.nontagged;
+
+import java.io.Serializable;
+
+import edu.uci.ics.asterix.dataflow.data.nontagged.hash.MurmurHash3BinaryHashFunctionFamily;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+
+/**
+ * We use a type-independent binary hash function family from the Hyracks
+ * codebase.
+ */
+public class AqlBinaryHashFunctionFamilyProvider implements
+ IBinaryHashFunctionFamilyProvider, Serializable {
+
+ private static final long serialVersionUID = 1L;
+ public static final AqlBinaryHashFunctionFamilyProvider INSTANCE = new AqlBinaryHashFunctionFamilyProvider();
+
+ private AqlBinaryHashFunctionFamilyProvider() {
+
+ }
+
+ @Override
+ public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type)
+ throws AlgebricksException {
+ return MurmurHash3BinaryHashFunctionFamily.INSTANCE;
+ }
+
+}
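A short usage sketch for the new provider. It assumes the usual Hyracks IBinaryHashFunctionFamily contract (createBinaryHashFunction(seed) returning an IBinaryHashFunction with hash(bytes, offset, length)), which is not shown in this patch:

import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFamilyProvider;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;

public class HashFamilySketch {
    public static void main(String[] args) throws Exception {
        // The type argument is ignored: the same MurmurHash3 family serves every type.
        IBinaryHashFunctionFamily family = AqlBinaryHashFunctionFamilyProvider.INSTANCE
                .getBinaryHashFunctionFamily(null);
        byte[] key = "example".getBytes("UTF-8");
        // Different seeds yield independent hash functions, e.g. for the levels of a hybrid hash join.
        int h0 = family.createBinaryHashFunction(0).hash(key, 0, key.length);
        int h1 = family.createBinaryHashFunction(1).hash(key, 0, key.length);
        System.out.println(h0 + " " + h1);
    }
}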
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlNormalizedKeyComputerFactoryProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlNormalizedKeyComputerFactoryProvider.java
index fe953dc..23306ed 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlNormalizedKeyComputerFactoryProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlNormalizedKeyComputerFactoryProvider.java
@@ -1,12 +1,15 @@
package edu.uci.ics.asterix.formats.nontagged;
-import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AInt32AscNormalizedKeyComputerFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AInt32DescNormalizedKeyComputerFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AStringAscNormalizedKeyComputerFactory;
-import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AStringDescNormalizedKeyComputerFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AWrappedAscNormalizedKeyComputerFactory;
+import edu.uci.ics.asterix.dataflow.data.nontagged.keynormalizers.AWrappedDescNormalizedKeyComputerFactory;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.normalizers.DoubleNormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.normalizers.FloatNormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.normalizers.Integer64NormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.normalizers.IntegerNormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.normalizers.UTF8StringNormalizedKeyComputerFactory;
public class AqlNormalizedKeyComputerFactoryProvider implements INormalizedKeyComputerFactoryProvider {
@@ -21,10 +24,19 @@
if (ascending) {
switch (aqlType.getTypeTag()) {
case INT32: {
- return AInt32AscNormalizedKeyComputerFactory.INSTANCE;
+ return new AWrappedAscNormalizedKeyComputerFactory(new IntegerNormalizedKeyComputerFactory());
+ }
+ case INT64: {
+ return new AWrappedAscNormalizedKeyComputerFactory(new Integer64NormalizedKeyComputerFactory());
+ }
+ case FLOAT: {
+ return new AWrappedAscNormalizedKeyComputerFactory(new FloatNormalizedKeyComputerFactory());
+ }
+ case DOUBLE: {
+ return new AWrappedAscNormalizedKeyComputerFactory(new DoubleNormalizedKeyComputerFactory());
}
case STRING: {
- return AStringAscNormalizedKeyComputerFactory.INSTANCE;
+ return new AWrappedAscNormalizedKeyComputerFactory(new UTF8StringNormalizedKeyComputerFactory());
}
default: {
return null;
@@ -33,10 +45,19 @@
} else {
switch (aqlType.getTypeTag()) {
case INT32: {
- return AInt32DescNormalizedKeyComputerFactory.INSTANCE;
+ return new AWrappedDescNormalizedKeyComputerFactory(new IntegerNormalizedKeyComputerFactory());
+ }
+ case INT64: {
+ return new AWrappedDescNormalizedKeyComputerFactory(new Integer64NormalizedKeyComputerFactory());
+ }
+ case FLOAT: {
+ return new AWrappedDescNormalizedKeyComputerFactory(new FloatNormalizedKeyComputerFactory());
+ }
+ case DOUBLE: {
+ return new AWrappedDescNormalizedKeyComputerFactory(new DoubleNormalizedKeyComputerFactory());
}
case STRING: {
- return AStringDescNormalizedKeyComputerFactory.INSTANCE;
+ return new AWrappedDescNormalizedKeyComputerFactory(new UTF8StringNormalizedKeyComputerFactory());
}
default: {
return null;
@@ -44,5 +65,4 @@
}
}
}
-
}
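The AWrapped*NormalizedKeyComputerFactory classes referenced above are not part of this hunk; the sketch below is only a guess at the wrapping idea for the ascending case (skip the 1-byte ADM type tag, then delegate to the untagged Hyracks normalizer) and is not the actual implementation:

import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;

public class TagSkippingNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
    private static final long serialVersionUID = 1L;
    private final INormalizedKeyComputerFactory delegate;

    public TagSkippingNormalizedKeyComputerFactory(INormalizedKeyComputerFactory delegate) {
        this.delegate = delegate;
    }

    @Override
    public INormalizedKeyComputer createNormalizedKeyComputer() {
        final INormalizedKeyComputer nkc = delegate.createNormalizedKeyComputer();
        return new INormalizedKeyComputer() {
            @Override
            public int normalize(byte[] bytes, int start, int length) {
                // Skip the leading ADM type tag so the untagged normalizer sees only the value bytes.
                return nkc.normalize(bytes, start + 1, length - 1);
            }
        };
    }
}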
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
index d4d4fb6..5492c5c 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/formats/nontagged/AqlSerializerDeserializerProvider.java
@@ -48,7 +48,7 @@
private AqlSerializerDeserializerProvider() {
}
- @SuppressWarnings("unchecked")
+ @SuppressWarnings("rawtypes")
@Override
public ISerializerDeserializer getSerializerDeserializer(Object typeInfo) {
IAType aqlType = (IAType) typeInfo;
@@ -63,7 +63,7 @@
}
}
- @SuppressWarnings("unchecked")
+ @SuppressWarnings("rawtypes")
public ISerializerDeserializer getNonTaggedSerializerDeserializer(IAType aqlType) {
switch (aqlType.getTypeTag()) {
case CIRCLE: {
@@ -139,7 +139,7 @@
}
}
- @SuppressWarnings("unchecked")
+ @SuppressWarnings("rawtypes")
private ISerializerDeserializer addTag(final ISerializerDeserializer nonTaggedSerde, final ATypeTag typeTag) {
return new ISerializerDeserializer<IAObject>() {
@@ -148,13 +148,15 @@
@Override
public IAObject deserialize(DataInput in) throws HyracksDataException {
try {
- ATypeTag typeTag = SerializerDeserializerUtil.deserializeTag(in);
+ //deserialize the tag to move the input cursor forward
+ SerializerDeserializerUtil.deserializeTag(in);
} catch (IOException e) {
throw new HyracksDataException(e);
}
return (IAObject) nonTaggedSerde.deserialize(in);
}
+ @SuppressWarnings("unchecked")
@Override
public void serialize(IAObject instance, DataOutput out) throws HyracksDataException {
SerializerDeserializerUtil.serializeTag(instance, out);
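A minimal round-trip sketch of the tagged serde returned by this provider. It assumes the provider exposes a singleton INSTANCE (as its private constructor suggests) and uses AInt32/BuiltinType from asterix-om; treat the exact entry point as an assumption rather than something confirmed by this patch:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
import edu.uci.ics.asterix.om.base.AInt32;
import edu.uci.ics.asterix.om.base.IAObject;
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;

public class TaggedSerdeSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) throws Exception {
        // The returned serde writes a 1-byte type tag followed by the untagged payload.
        ISerializerDeserializer<IAObject> serde = AqlSerializerDeserializerProvider.INSTANCE
                .getSerializerDeserializer(BuiltinType.AINT32);
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        serde.serialize(new AInt32(42), new DataOutputStream(bos));
        // deserialize() consumes the tag (see the change above) and rebuilds the value.
        IAObject back = serde.deserialize(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
        System.out.println(back);
    }
}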
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
index 42904fc..2d4419f 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/functions/AsterixBuiltinFunctions.java
@@ -38,6 +38,7 @@
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedSumTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedSwitchCaseComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.NonTaggedUnaryMinusTypeComputer;
+import edu.uci.ics.asterix.om.typecomputer.impl.NotNullTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OpenRecordConstructorResultType;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalACircleTypeComputer;
import edu.uci.ics.asterix.om.typecomputer.impl.OptionalADateTimeTypeComputer;
@@ -235,11 +236,14 @@
public final static FunctionIdentifier AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-avg", 1);
public final static FunctionIdentifier COUNT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-count", 1);
public final static FunctionIdentifier SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sum", 1);
- public final static FunctionIdentifier LOCAL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-sum", 1);
+ public final static FunctionIdentifier LOCAL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "agg-local-sum", 1);
public final static FunctionIdentifier MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-max", 1);
- public final static FunctionIdentifier LOCAL_MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-max", 1);
+ public final static FunctionIdentifier LOCAL_MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "agg-local-max", 1);
public final static FunctionIdentifier MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-min", 1);
- public final static FunctionIdentifier LOCAL_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-min", 1);
+ public final static FunctionIdentifier LOCAL_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+ "agg-local-min", 1);
public final static FunctionIdentifier GLOBAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"agg-global-avg", 1);
public final static FunctionIdentifier LOCAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
@@ -394,12 +398,17 @@
public final static FunctionIdentifier CAST_RECORD = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
"cast-record", 1);
- public final static FunctionIdentifier GET_POINT_X_COORDINATE_ACCESSOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-x", 1);
- public final static FunctionIdentifier GET_POINT_Y_COORDINATE_ACCESSOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-y", 1);
- public final static FunctionIdentifier GET_CIRCLE_RADIUS_ACCESSOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-radius", 1);
- public final static FunctionIdentifier GET_CIRCLE_CENTER_ACCESSOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-center", 1);
- public final static FunctionIdentifier GET_POINTS_LINE_RECTANGLE_POLYGON_ACCESSOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-points", 1);
-
+ public final static FunctionIdentifier GET_POINT_X_COORDINATE_ACCESSOR = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "get-x", 1);
+ public final static FunctionIdentifier GET_POINT_Y_COORDINATE_ACCESSOR = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "get-y", 1);
+ public final static FunctionIdentifier GET_CIRCLE_RADIUS_ACCESSOR = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "get-radius", 1);
+ public final static FunctionIdentifier GET_CIRCLE_CENTER_ACCESSOR = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "get-center", 1);
+ public final static FunctionIdentifier GET_POINTS_LINE_RECTANGLE_POLYGON_ACCESSOR = new FunctionIdentifier(
+ FunctionConstants.ASTERIX_NS, "get-points", 1);
+
public static final FunctionIdentifier EQ = AlgebricksBuiltinFunctions.EQ;
public static final FunctionIdentifier LE = AlgebricksBuiltinFunctions.LE;
public static final FunctionIdentifier GE = AlgebricksBuiltinFunctions.GE;
@@ -412,6 +421,9 @@
public static final FunctionIdentifier NUMERIC_ADD = AlgebricksBuiltinFunctions.NUMERIC_ADD;
public static final FunctionIdentifier IS_NULL = AlgebricksBuiltinFunctions.IS_NULL;
+ public static final FunctionIdentifier NOT_NULL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "not-null",
+ 1);
+
public static IFunctionInfo getAsterixFunctionInfo(FunctionIdentifier fid) {
IFunctionInfo finfo = finfoRepo.get(fid);
if (finfo == null) {
@@ -440,6 +452,7 @@
add(NUMERIC_ADD, NonTaggedNumericAddSubMulDivTypeComputer.INSTANCE);
// and then, Asterix builtin functions
+ add(NOT_NULL, NotNullTypeComputer.INSTANCE);
add(ANY_COLLECTION_MEMBER, NonTaggedCollectionMemberResultType.INSTANCE);
addPrivateFunction(AVG, OptionalADoubleTypeComputer.INSTANCE);
add(BOOLEAN_CONSTRUCTOR, UnaryBooleanOrNullFunctionTypeComputer.INSTANCE);
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/AFlatValuePointable.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AFlatValuePointable.java
similarity index 89%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/AFlatValuePointable.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AFlatValuePointable.java
index a3547a4..445e2a2 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/AFlatValuePointable.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AFlatValuePointable.java
@@ -13,13 +13,13 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.pointables;
+package edu.uci.ics.asterix.om.pointables;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.visitor.IVisitablePointableVisitor;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.runtime.pointables.base.IVisitablePointable;
-import edu.uci.ics.asterix.runtime.pointables.visitor.IVisitablePointableVisitor;
-import edu.uci.ics.asterix.runtime.util.container.IObjectFactory;
+import edu.uci.ics.asterix.om.util.container.IObjectFactory;
import edu.uci.ics.hyracks.data.std.api.IValueReference;
/**
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/AListPointable.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AListPointable.java
similarity index 91%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/AListPointable.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AListPointable.java
index 6f85694..ab32b6b 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/AListPointable.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AListPointable.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.pointables;
+package edu.uci.ics.asterix.om.pointables;
import java.io.DataOutputStream;
import java.util.ArrayList;
@@ -21,21 +21,21 @@
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.visitor.IVisitablePointableVisitor;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.AbstractCollectionType;
import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.asterix.runtime.pointables.base.IVisitablePointable;
-import edu.uci.ics.asterix.runtime.pointables.visitor.IVisitablePointableVisitor;
-import edu.uci.ics.asterix.runtime.util.ResettableByteArrayOutputStream;
-import edu.uci.ics.asterix.runtime.util.container.IObjectFactory;
+import edu.uci.ics.asterix.om.util.ResettableByteArrayOutputStream;
+import edu.uci.ics.asterix.om.util.container.IObjectFactory;
/**
 * This class interprets the binary data representation of a list; one can
 * call getItems and getItemTags to get pointable objects for the items and
 * their type tags.
- *
*/
public class AListPointable extends AbstractVisitablePointable {
@@ -59,6 +59,7 @@
private IAType itemType;
private ATypeTag itemTag;
private boolean typedItemList = false;
+ private boolean ordered = false;
/**
* private constructor, to prevent constructing it arbitrarily
@@ -66,6 +67,9 @@
* @param inputType
*/
private AListPointable(AbstractCollectionType inputType) {
+ if (inputType instanceof AOrderedListType) {
+ ordered = true;
+ }
if (inputType != null && inputType.getItemType() != null) {
itemType = inputType.getItemType();
if (itemType.getTypeTag() == ATypeTag.ANY) {
@@ -136,7 +140,7 @@
itemTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(b[itemOffset]);
itemLength = NonTaggedFormatUtil.getFieldValueLength(b, itemOffset, itemTag, true) + 1;
IVisitablePointable tag = allocator.allocateEmpty();
- IVisitablePointable item = allocator.allocateFieldValue(itemType);
+ IVisitablePointable item = allocator.allocateFieldValue(itemTag);
// set item type tag
int start = dataBos.size();
@@ -168,4 +172,8 @@
public List<IVisitablePointable> getItemTags() {
return itemTags;
}
+
+ public boolean ordered() {
+ return ordered;
+ }
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/ARecordPointable.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/ARecordPointable.java
similarity index 96%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/ARecordPointable.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/ARecordPointable.java
index 5904170..7e097af 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/ARecordPointable.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/ARecordPointable.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.pointables;
+package edu.uci.ics.asterix.om.pointables;
import java.io.DataOutputStream;
import java.io.IOException;
@@ -23,16 +23,16 @@
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.AqlNullWriterFactory;
import edu.uci.ics.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.visitor.IVisitablePointableVisitor;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.AUnionType;
import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.asterix.runtime.pointables.base.IVisitablePointable;
-import edu.uci.ics.asterix.runtime.pointables.visitor.IVisitablePointableVisitor;
-import edu.uci.ics.asterix.runtime.util.ResettableByteArrayOutputStream;
-import edu.uci.ics.asterix.runtime.util.container.IObjectFactory;
+import edu.uci.ics.asterix.om.util.ResettableByteArrayOutputStream;
+import edu.uci.ics.asterix.om.util.container.IObjectFactory;
import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
/**
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/AbstractVisitablePointable.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AbstractVisitablePointable.java
similarity index 89%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/AbstractVisitablePointable.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AbstractVisitablePointable.java
index 8cfe661..943a967 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/AbstractVisitablePointable.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/AbstractVisitablePointable.java
@@ -13,21 +13,20 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.pointables;
+package edu.uci.ics.asterix.om.pointables;
-import edu.uci.ics.asterix.runtime.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
import edu.uci.ics.hyracks.data.std.api.IValueReference;
/**
 * This class implements several "routine" methods of the IVisitablePointable
 * interface, so that subclasses do not need to repeat the same code.
- *
*/
public abstract class AbstractVisitablePointable implements IVisitablePointable {
private byte[] data;
- private int start;
- private int len;
+ private int start = -1;
+ private int len = -1;
@Override
public byte[] getByteArray() {
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/PointableAllocator.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/PointableAllocator.java
similarity index 90%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/PointableAllocator.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/PointableAllocator.java
index b6116be..5ad2ccb 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/PointableAllocator.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/PointableAllocator.java
@@ -13,14 +13,14 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.pointables;
+package edu.uci.ics.asterix.om.pointables;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.runtime.pointables.base.DefaultOpenFieldType;
-import edu.uci.ics.asterix.runtime.pointables.base.IVisitablePointable;
-import edu.uci.ics.asterix.runtime.util.container.IObjectPool;
-import edu.uci.ics.asterix.runtime.util.container.ListObjectPool;
+import edu.uci.ics.asterix.om.util.container.IObjectPool;
+import edu.uci.ics.asterix.om.util.container.ListObjectPool;
/**
* This class is the ONLY place to create IVisitablePointable object instances,
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/base/DefaultOpenFieldType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/DefaultOpenFieldType.java
similarity index 97%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/base/DefaultOpenFieldType.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/DefaultOpenFieldType.java
index 987ac02..9184616 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/base/DefaultOpenFieldType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/DefaultOpenFieldType.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.pointables.base;
+package edu.uci.ics.asterix.om.pointables.base;
import edu.uci.ics.asterix.om.types.AOrderedListType;
import edu.uci.ics.asterix.om.types.ARecordType;
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/base/IVisitablePointable.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/IVisitablePointable.java
similarity index 88%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/base/IVisitablePointable.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/IVisitablePointable.java
index 28c61c2..7456ee8 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/base/IVisitablePointable.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/base/IVisitablePointable.java
@@ -13,10 +13,10 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.pointables.base;
+package edu.uci.ics.asterix.om.pointables.base;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.runtime.pointables.visitor.IVisitablePointableVisitor;
+import edu.uci.ics.asterix.om.pointables.visitor.IVisitablePointableVisitor;
import edu.uci.ics.hyracks.data.std.api.IPointable;
/**
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/cast/ACastVisitor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/ACastVisitor.java
similarity index 80%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/cast/ACastVisitor.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/ACastVisitor.java
index 822e37c..9fccf4a 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/cast/ACastVisitor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/ACastVisitor.java
@@ -13,31 +13,31 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.pointables.cast;
+package edu.uci.ics.asterix.om.pointables.cast;
import java.util.HashMap;
import java.util.Map;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.pointables.AFlatValuePointable;
+import edu.uci.ics.asterix.om.pointables.AListPointable;
+import edu.uci.ics.asterix.om.pointables.ARecordPointable;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.visitor.IVisitablePointableVisitor;
import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.AbstractCollectionType;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.runtime.pointables.AFlatValuePointable;
-import edu.uci.ics.asterix.runtime.pointables.AListPointable;
-import edu.uci.ics.asterix.runtime.pointables.ARecordPointable;
-import edu.uci.ics.asterix.runtime.pointables.base.IVisitablePointable;
-import edu.uci.ics.asterix.runtime.pointables.visitor.IVisitablePointableVisitor;
import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
/**
 * This class is an IVisitablePointableVisitor implementation which recursively
 * visits a given record, list or flat value of a given type and casts it to a
* specified type. For example:
- *
 * A record { "hobby": {{"music", "coding"}}, "id": "001", "name":
 * "Person Three"} which conforms to the closed type ( id: string, name: string,
 * hobby: {{string}}? ) can be cast to an open type ( id: string )
- *
* Since the open/closed part of a record has a completely different underlying
* memory/storage layout, the visitor will change the layout as specified at
* runtime.
@@ -56,6 +56,9 @@
laccessorToCaster.put(accessor, caster);
}
try {
+ if (arg.second.getTypeTag().equals(ATypeTag.ANY)) {
+ arg.second = DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE;
+ }
caster.castList(accessor, arg.first, (AbstractCollectionType) arg.second, this);
} catch (Exception e) {
throw new AsterixException(e);
@@ -72,6 +75,9 @@
raccessorToCaster.put(accessor, caster);
}
try {
+ if (arg.second.getTypeTag().equals(ATypeTag.ANY)) {
+ arg.second = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
+ }
caster.castRecord(accessor, arg.first, (ARecordType) arg.second, this);
} catch (Exception e) {
throw new AsterixException(e);
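To make the cast path above concrete, here is a rough usage sketch. It assumes the visitor's argument is a Triple of (result pointable, target type, Boolean flag), consistent with the arg.first/arg.second usage in the hunk, that ACastVisitor has a no-argument constructor, and that PointableAllocator offers allocateRecordValue(IAType)/allocateFieldValue(IAType) helpers; all of these are assumptions, not confirmed by this patch:

import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.pointables.PointableAllocator;
import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
import edu.uci.ics.asterix.om.pointables.cast.ACastVisitor;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.utils.Triple;

public class CastSketch {
    // inputType describes the closed record serialized in inputBytes.
    public static IVisitablePointable castToOpen(byte[] inputBytes, ARecordType inputType) throws AsterixException {
        PointableAllocator allocator = new PointableAllocator();
        // Bind a pointable to the serialized closed record (allocateRecordValue is an assumed helper).
        IVisitablePointable closedRecord = allocator.allocateRecordValue(inputType);
        closedRecord.set(inputBytes, 0, inputBytes.length);

        // Target: the generic open record type that the visitor also substitutes for ANY above.
        IAType openType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
        IVisitablePointable result = allocator.allocateFieldValue(openType);

        // The visitor writes the re-laid-out record into 'result'.
        Triple<IVisitablePointable, IAType, Boolean> arg = new Triple<IVisitablePointable, IAType, Boolean>(result,
                openType, Boolean.FALSE);
        closedRecord.accept(new ACastVisitor(), arg);
        return result;
    }
}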
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/cast/AListCaster.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/AListCaster.java
similarity index 90%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/cast/AListCaster.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/AListCaster.java
index e10fb72..c65b3eb 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/cast/AListCaster.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/AListCaster.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.pointables.cast;
+package edu.uci.ics.asterix.om.pointables.cast;
import java.io.DataOutput;
import java.io.DataOutputStream;
@@ -23,17 +23,17 @@
import edu.uci.ics.asterix.builders.OrderedListBuilder;
import edu.uci.ics.asterix.builders.UnorderedListBuilder;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.pointables.AListPointable;
+import edu.uci.ics.asterix.om.pointables.PointableAllocator;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
import edu.uci.ics.asterix.om.types.AOrderedListType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.AUnorderedListType;
import edu.uci.ics.asterix.om.types.AbstractCollectionType;
import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.runtime.pointables.AListPointable;
-import edu.uci.ics.asterix.runtime.pointables.PointableAllocator;
-import edu.uci.ics.asterix.runtime.pointables.base.DefaultOpenFieldType;
-import edu.uci.ics.asterix.runtime.pointables.base.IVisitablePointable;
-import edu.uci.ics.asterix.runtime.util.ResettableByteArrayOutputStream;
+import edu.uci.ics.asterix.om.util.ResettableByteArrayOutputStream;
import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
/**
@@ -64,9 +64,11 @@
AbstractCollectionType reqType, ACastVisitor visitor) throws IOException, AsterixException {
if (reqType.getTypeTag().equals(ATypeTag.UNORDEREDLIST)) {
unOrderedListBuilder.reset((AUnorderedListType) reqType);
+ reqItemType = reqType.getItemType();
}
if (reqType.getTypeTag().equals(ATypeTag.ORDEREDLIST)) {
orderedListBuilder.reset((AOrderedListType) reqType);
+ reqItemType = reqType.getItemType();
}
dataBos.reset();
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/cast/ARecordCaster.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/ARecordCaster.java
similarity index 87%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/cast/ARecordCaster.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/ARecordCaster.java
index 8ef2ef0..494ea6f 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/cast/ARecordCaster.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/cast/ARecordCaster.java
@@ -13,28 +13,32 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.pointables.cast;
+package edu.uci.ics.asterix.om.pointables.cast;
+import java.io.ByteArrayOutputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
+import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
import edu.uci.ics.asterix.builders.RecordBuilder;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.dataflow.data.nontagged.AqlNullWriterFactory;
+import edu.uci.ics.asterix.om.pointables.ARecordPointable;
+import edu.uci.ics.asterix.om.pointables.PointableAllocator;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.printer.APrintVisitor;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.asterix.om.types.AUnionType;
import edu.uci.ics.asterix.om.types.EnumDeserializer;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.asterix.runtime.pointables.ARecordPointable;
-import edu.uci.ics.asterix.runtime.pointables.PointableAllocator;
-import edu.uci.ics.asterix.runtime.pointables.base.DefaultOpenFieldType;
-import edu.uci.ics.asterix.runtime.pointables.base.IVisitablePointable;
-import edu.uci.ics.asterix.runtime.util.ResettableByteArrayOutputStream;
+import edu.uci.ics.asterix.om.util.ResettableByteArrayOutputStream;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
@@ -184,7 +188,7 @@
}
private void matchClosedPart(List<IVisitablePointable> fieldNames, List<IVisitablePointable> fieldTypeTags,
- List<IVisitablePointable> fieldValues) {
+ List<IVisitablePointable> fieldValues) throws AsterixException {
// sort-merge based match
quickSort(fieldNamesSortedIndex, fieldNames, 0, numInputFields - 1);
int fnStart = 0;
@@ -213,8 +217,30 @@
// check unmatched fields in the input type
for (int i = 0; i < openFields.length; i++) {
- if (openFields[i] == true && !cachedReqType.isOpen())
- throw new IllegalStateException("type mismatch: including extra closed fields");
+ if (openFields[i] == true && !cachedReqType.isOpen()) {
+ //print the field name
+ IVisitablePointable fieldName = fieldNames.get(i);
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ PrintStream ps = new PrintStream(bos);
+ APrintVisitor printVisitor = new APrintVisitor();
+ Pair<PrintStream, ATypeTag> visitorArg = new Pair<PrintStream, ATypeTag>(ps, ATypeTag.STRING);
+ fieldName.accept(printVisitor, visitorArg);
+
+ //print the colon
+ ps.print(":");
+
+ //print the field type
+ IVisitablePointable fieldType = fieldTypeTags.get(i);
+ ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(fieldType.getByteArray()[fieldType
+ .getStartOffset()]);
+ ps.print(typeTag);
+
+ //collect the output message
+ byte[] output = bos.toByteArray();
+
+ //throw the exception
+ throw new IllegalStateException("type mismatch: including an extra field " + new String(output));
+ }
}
// check unmatched fields in the required type
@@ -223,7 +249,8 @@
IAType t = cachedReqType.getFieldTypes()[i];
if (!(t.getTypeTag() == ATypeTag.UNION && NonTaggedFormatUtil.isOptionalField((AUnionType) t))) {
// no matched field in the input for a required closed field
- throw new IllegalStateException("type mismatch: miss a required closed field");
+ throw new IllegalStateException("type mismatch: miss a required closed field "
+ + cachedReqType.getFieldNames()[i] + ":" + t.getTypeName());
}
}
}
@@ -288,8 +315,7 @@
int j = right;
while (true) {
// grow from the left
- while (compare(names.get(index[++i]), names.get(index[right])) < 0)
- ;
+ while (compare(names.get(index[++i]), names.get(index[right])) < 0);
// lower from the right
while (compare(names.get(index[right]), names.get(index[--j])) < 0)
if (j == left)
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/AListPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/AListPrinter.java
new file mode 100644
index 0000000..283c916
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/AListPrinter.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.om.pointables.printer;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.List;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.pointables.AListPointable;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * This class prints the content of a list. It is ONLY visible to
+ * APrintVisitor.
+ */
+class AListPrinter {
+ private static String LEFT_PAREN = "{{ ";
+ private static String RIGHT_PAREN = " }}";
+ private static String LEFT_PAREN_ORDERED = "[ ";
+ private static String RIGHT_PAREN_ORDERED = " ]";
+ private static String COMMA = ", ";
+
+ private final Pair<PrintStream, ATypeTag> itemVisitorArg = new Pair<PrintStream, ATypeTag>(null, null);
+ private String leftParen = LEFT_PAREN;
+ private String rightParen = RIGHT_PAREN;
+
+ public AListPrinter(boolean ordered) {
+ if (ordered) {
+ leftParen = LEFT_PAREN_ORDERED;
+ rightParen = RIGHT_PAREN_ORDERED;
+ }
+ }
+
+ public void printList(AListPointable listAccessor, PrintStream ps, APrintVisitor visitor) throws IOException,
+ AsterixException {
+ List<IVisitablePointable> itemTags = listAccessor.getItemTags();
+ List<IVisitablePointable> items = listAccessor.getItems();
+ itemVisitorArg.first = ps;
+
+ // print the beginning part
+ ps.print(leftParen);
+
+ // print item 0 to n-2
+ for (int i = 0; i < items.size() - 1; i++) {
+ printItem(visitor, itemTags, items, i);
+ // print the comma
+ ps.print(COMMA);
+ }
+
+ // print item n-1
+ if (items.size() > 0) {
+ printItem(visitor, itemTags, items, items.size() - 1);
+ }
+
+ // print the end part
+ ps.print(rightParen);
+ }
+
+ private void printItem(APrintVisitor visitor, List<IVisitablePointable> itemTags, List<IVisitablePointable> items,
+ int i) throws AsterixException {
+ IVisitablePointable itemTypeTag = itemTags.get(i);
+ IVisitablePointable item = items.get(i);
+ ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(itemTypeTag.getByteArray()[itemTypeTag
+ .getStartOffset()]);
+ itemVisitorArg.second = item.getLength() <= 1 ? ATypeTag.NULL : typeTag;
+ item.accept(visitor, itemVisitorArg);
+ }
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/APrintVisitor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/APrintVisitor.java
new file mode 100644
index 0000000..3664804
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/APrintVisitor.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.om.pointables.printer;
+
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ABooleanPrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ACirclePrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ADatePrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ADateTimePrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ADoublePrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ADurationPrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AFloatPrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AInt16Printer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AInt32Printer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AInt64Printer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AInt8Printer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ALinePrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ANullPrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.APoint3DPrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.APointPrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.APolygonPrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ARectanglePrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.AStringPrinter;
+import edu.uci.ics.asterix.dataflow.data.nontagged.printers.ATimePrinter;
+import edu.uci.ics.asterix.om.pointables.AFlatValuePointable;
+import edu.uci.ics.asterix.om.pointables.AListPointable;
+import edu.uci.ics.asterix.om.pointables.ARecordPointable;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.visitor.IVisitablePointableVisitor;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * This class is an IVisitablePointableVisitor implementation which recursively
+ * visits a given record, list or flat value of a given type and prints it to a
+ * PrintStream in ADM format.
+ */
+public class APrintVisitor implements IVisitablePointableVisitor<Void, Pair<PrintStream, ATypeTag>> {
+
+ private final Map<IVisitablePointable, ARecordPrinter> raccessorToPrinter = new HashMap<IVisitablePointable, ARecordPrinter>();
+ private final Map<IVisitablePointable, AListPrinter> laccessorToPrinter = new HashMap<IVisitablePointable, AListPrinter>();
+
+ @Override
+ public Void visit(AListPointable accessor, Pair<PrintStream, ATypeTag> arg) throws AsterixException {
+ AListPrinter printer = laccessorToPrinter.get(accessor);
+ if (printer == null) {
+ printer = new AListPrinter(accessor.ordered());
+ laccessorToPrinter.put(accessor, printer);
+ }
+ try {
+ printer.printList(accessor, arg.first, this);
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+ return null;
+ }
+
+ @Override
+ public Void visit(ARecordPointable accessor, Pair<PrintStream, ATypeTag> arg) throws AsterixException {
+ ARecordPrinter printer = raccessorToPrinter.get(accessor);
+ if (printer == null) {
+ printer = new ARecordPrinter();
+ raccessorToPrinter.put(accessor, printer);
+ }
+ try {
+ printer.printRecord(accessor, arg.first, this);
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+ return null;
+ }
+
+ @Override
+ public Void visit(AFlatValuePointable accessor, Pair<PrintStream, ATypeTag> arg) {
+ try {
+ byte[] b = accessor.getByteArray();
+ int s = accessor.getStartOffset();
+ int l = accessor.getLength();
+ PrintStream ps = arg.first;
+ ATypeTag typeTag = arg.second;
+ switch (typeTag) {
+ case INT8: {
+ AInt8Printer.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case INT16: {
+ AInt16Printer.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case INT32: {
+ AInt32Printer.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case INT64: {
+ AInt64Printer.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case NULL: {
+ ANullPrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case BOOLEAN: {
+ ABooleanPrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case FLOAT: {
+ AFloatPrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case DOUBLE: {
+ ADoublePrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case DATE: {
+ ADatePrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case TIME: {
+ ATimePrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case DATETIME: {
+ ADateTimePrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case DURATION: {
+ ADurationPrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case POINT: {
+ APointPrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case POINT3D: {
+ APoint3DPrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case LINE: {
+ ALinePrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case POLYGON: {
+ APolygonPrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case CIRCLE: {
+ ACirclePrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case RECTANGLE: {
+ ARectanglePrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ case STRING: {
+ AStringPrinter.INSTANCE.print(b, s, l, ps);
+ break;
+ }
+ default: {
+ throw new NotImplementedException("No printer for type " + typeTag);
+ }
+ }
+ return null;
+ } catch (Exception e) {
+ throw new IllegalStateException(e);
+ }
+ }
+}
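A small usage sketch of the visitor above. The pointable is assumed to already reference a serialized ADM value, and the tag passed in the Pair selects the printer branch (for records and lists the visitor recurses through ARecordPrinter/AListPrinter); only the wrapper class name is invented here:

import java.io.PrintStream;

import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
import edu.uci.ics.asterix.om.pointables.printer.APrintVisitor;
import edu.uci.ics.asterix.om.types.ATypeTag;
import edu.uci.ics.hyracks.algebricks.common.utils.Pair;

public class PrintSketch {
    public static void printAdm(IVisitablePointable value, ATypeTag tag, PrintStream ps) throws AsterixException {
        APrintVisitor printVisitor = new APrintVisitor();
        // The PrintStream receives the ADM text; the tag tells the visitor which printer to use.
        Pair<PrintStream, ATypeTag> arg = new Pair<PrintStream, ATypeTag>(ps, tag);
        value.accept(printVisitor, arg);
    }
}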
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/ARecordPrinter.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/ARecordPrinter.java
new file mode 100644
index 0000000..55f9447
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/printer/ARecordPrinter.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.om.pointables.printer;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.List;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.pointables.ARecordPointable;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.EnumDeserializer;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * This class prints the content of a record. It is ONLY visible to
+ * APrintVisitor.
+ */
+class ARecordPrinter {
+ private static String LEFT_PAREN = "{ ";
+ private static String RIGHT_PAREN = " }";
+ private static String COMMA = ", ";
+ private static String COLON = ": ";
+
+ private final Pair<PrintStream, ATypeTag> nameVisitorArg = new Pair<PrintStream, ATypeTag>(null, ATypeTag.STRING);
+ private final Pair<PrintStream, ATypeTag> itemVisitorArg = new Pair<PrintStream, ATypeTag>(null, null);
+
+ public ARecordPrinter() {
+
+ }
+
+ public void printRecord(ARecordPointable recordAccessor, PrintStream ps, APrintVisitor visitor) throws IOException,
+ AsterixException {
+ List<IVisitablePointable> fieldNames = recordAccessor.getFieldNames();
+ List<IVisitablePointable> fieldTags = recordAccessor.getFieldTypeTags();
+ List<IVisitablePointable> fieldValues = recordAccessor.getFieldValues();
+
+ nameVisitorArg.first = ps;
+ itemVisitorArg.first = ps;
+
+ // print the beginning part
+ ps.print(LEFT_PAREN);
+
+ // print field 0 to n-2
+ for (int i = 0; i < fieldNames.size() - 1; i++) {
+ printField(ps, visitor, fieldNames, fieldTags, fieldValues, i);
+ // print the comma
+ ps.print(COMMA);
+ }
+
+ // print field n-1
+ if (fieldValues.size() > 0) {
+ printField(ps, visitor, fieldNames, fieldTags, fieldValues, fieldValues.size() - 1);
+ }
+
+ // print the end part
+ ps.print(RIGHT_PAREN);
+ }
+
+ private void printField(PrintStream ps, APrintVisitor visitor, List<IVisitablePointable> fieldNames,
+ List<IVisitablePointable> fieldTags, List<IVisitablePointable> fieldValues, int i) throws AsterixException {
+ IVisitablePointable itemTypeTag = fieldTags.get(i);
+ IVisitablePointable item = fieldValues.get(i);
+ ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(itemTypeTag.getByteArray()[itemTypeTag
+ .getStartOffset()]);
+ itemVisitorArg.second = item.getLength() <= 1 ? ATypeTag.NULL : typeTag;
+
+ // print field name
+ fieldNames.get(i).accept(visitor, nameVisitorArg);
+ ps.print(COLON);
+ // print field value
+ item.accept(visitor, itemVisitorArg);
+ }
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/visitor/IVisitablePointableVisitor.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/visitor/IVisitablePointableVisitor.java
similarity index 82%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/visitor/IVisitablePointableVisitor.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/visitor/IVisitablePointableVisitor.java
index 669e3fc..bcb4b34 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/pointables/visitor/IVisitablePointableVisitor.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/pointables/visitor/IVisitablePointableVisitor.java
@@ -13,12 +13,12 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.pointables.visitor;
+package edu.uci.ics.asterix.om.pointables.visitor;
import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.runtime.pointables.AFlatValuePointable;
-import edu.uci.ics.asterix.runtime.pointables.AListPointable;
-import edu.uci.ics.asterix.runtime.pointables.ARecordPointable;
+import edu.uci.ics.asterix.om.pointables.AFlatValuePointable;
+import edu.uci.ics.asterix.om.pointables.AListPointable;
+import edu.uci.ics.asterix.om.pointables.ARecordPointable;
/**
* This interface is a visitor for all the three different IVisitablePointable
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/FieldAccessByIndexResultType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/FieldAccessByIndexResultType.java
index dbe338d..d4b11fa 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/FieldAccessByIndexResultType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/FieldAccessByIndexResultType.java
@@ -38,6 +38,9 @@
}
IAType type0 = (IAType) obj;
ARecordType t0 = NonTaggedFieldAccessByNameResultType.getRecordTypeFromType(type0, expression);
+ if (t0 == null) {
+ return BuiltinType.ANY;
+ }
ILogicalExpression arg1 = f.getArguments().get(1).getValue();
if (arg1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
return BuiltinType.ANY;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedFieldAccessByNameResultType.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedFieldAccessByNameResultType.java
index 4b8b32a9..db9d932 100644
--- a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedFieldAccessByNameResultType.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NonTaggedFieldAccessByNameResultType.java
@@ -2,6 +2,7 @@
import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.constants.AsterixConstantValue;
+import edu.uci.ics.asterix.om.pointables.base.DefaultOpenFieldType;
import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.ATypeTag;
@@ -9,7 +10,6 @@
import edu.uci.ics.asterix.om.types.BuiltinType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
@@ -36,6 +36,9 @@
}
IAType type0 = (IAType) obj;
ARecordType t0 = getRecordTypeFromType(type0, expression);
+ if (t0 == null) {
+ return BuiltinType.ANY;
+ }
AbstractLogicalExpression arg1 = (AbstractLogicalExpression) f.getArguments().get(1).getValue();
if (arg1.getExpressionTag() != LogicalExpressionTag.CONSTANT) {
@@ -58,7 +61,7 @@
return (ARecordType) type0;
}
case ANY: {
- throw new NotImplementedException();
+ return DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
}
case UNION: {
AUnionType u = (AUnionType) type0;
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NotNullTypeComputer.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NotNullTypeComputer.java
new file mode 100644
index 0000000..a9881c2
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/typecomputer/impl/NotNullTypeComputer.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.om.typecomputer.impl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.asterix.om.typecomputer.base.IResultTypeComputer;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
+import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
+
+/**
+ * This class is the type computer for the not-null function.
+ * If the input type is not a union, we simply return it.
+ * If the input type is a union:
+ * case 1: we return a new union without null if the new union still contains more than one type;
+ * case 2: we return the non-null item type if the original union contains only that type and null.
+ */
+public class NotNullTypeComputer implements IResultTypeComputer {
+
+ public static final NotNullTypeComputer INSTANCE = new NotNullTypeComputer();
+
+ @Override
+ public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
+ IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
+ AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expression;
+ IAType type = (IAType) env.getType(f.getArguments().get(0).getValue());
+ if (type.getTypeTag() != ATypeTag.UNION) {
+ // directly return the input type if it is not a union
+ return type;
+ }
+
+ AUnionType unionType = (AUnionType) type;
+ List<IAType> items = new ArrayList<IAType>();
+ // copy the item types
+ items.addAll(unionType.getUnionList());
+
+ // remove null
+ for (int i = items.size() - 1; i >= 0; i--) {
+ IAType itemType = items.get(i);
+ if (itemType.getTypeTag() == ATypeTag.NULL) {
+ items.remove(i);
+ }
+ }
+ if (items.size() == 1) {
+ //only one type is left
+ return items.get(0);
+ } else {
+            // more than one type is left
+ return new AUnionType(items, unionType.getTypeName());
+ }
+ }
+}
diff --git a/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java
new file mode 100644
index 0000000..76f3301
--- /dev/null
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/AsterixRuntimeUtil.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2009-2011 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.om.util;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
+
+/**
+ * Utility class for obtaining information on the set of Hyracks NodeController
+ * processes that are running on a given host.
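+ *
+ * Usage sketch (hypothetical address, assuming a running cluster controller):
+ * getNodeControllersOnIP("10.0.0.1") returns the node controller ids registered
+ * at that address, or null if no node controller is known for it.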
+ */
+public class AsterixRuntimeUtil {
+
+ public static Set<String> getNodeControllersOnIP(String ipAddress)
+ throws Exception {
+ Map<String, Set<String>> nodeControllerInfo = getNodeControllerMap();
+ Set<String> nodeControllersAtLocation = nodeControllerInfo
+ .get(ipAddress);
+ return nodeControllersAtLocation;
+ }
+
+ public static List<String> getAllNodeControllers() throws Exception {
+ Collection<Set<String>> nodeControllersCollection = getNodeControllerMap()
+ .values();
+ List<String> nodeControllers = new ArrayList<String>();
+ for (Set<String> ncCollection : nodeControllersCollection) {
+ nodeControllers.addAll(ncCollection);
+ }
+ return nodeControllers;
+ }
+
+ public static Map<String, Set<String>> getNodeControllerMap()
+ throws Exception {
+ Map<String, Set<String>> map = new HashMap<String, Set<String>>();
+ AsterixAppContextInfoImpl.getInstance().getCCApplicationContext()
+ .getCCContext().getIPAddressNodeMap(map);
+ return map;
+ }
+
+ public static String getIPAddress(String hostname)
+ throws UnknownHostException {
+ String address = InetAddress.getByName(hostname).getHostAddress();
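+        // Debian-based hosts commonly map the local hostname to 127.0.1.1;
+        // normalize it to the canonical loopback address 127.0.0.1.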
+ if (address.equals("127.0.1.1")) {
+ address = "127.0.0.1";
+ }
+ return address;
+ }
+}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/ResettableByteArrayOutputStream.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/ResettableByteArrayOutputStream.java
similarity index 88%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/ResettableByteArrayOutputStream.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/util/ResettableByteArrayOutputStream.java
index 968c3f2..e47d417 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/ResettableByteArrayOutputStream.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/ResettableByteArrayOutputStream.java
@@ -1,4 +1,4 @@
-package edu.uci.ics.asterix.runtime.util;
+package edu.uci.ics.asterix.om.util;
import edu.uci.ics.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/container/IObjectFactory.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/IObjectFactory.java
similarity index 93%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/container/IObjectFactory.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/IObjectFactory.java
index 85f0fa3..bf11b76 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/container/IObjectFactory.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/IObjectFactory.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.util.container;
+package edu.uci.ics.asterix.om.util.container;
/**
* A factory interface to create objects.
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/container/IObjectPool.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/IObjectPool.java
similarity index 94%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/container/IObjectPool.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/IObjectPool.java
index dd2c3bf..4969cd5 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/container/IObjectPool.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/IObjectPool.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.util.container;
+package edu.uci.ics.asterix.om.util.container;
/**
* A reusable object pool interface.
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/container/ListObjectPool.java b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/ListObjectPool.java
similarity index 97%
rename from asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/container/ListObjectPool.java
rename to asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/ListObjectPool.java
index b8f8e3a..8d9057c 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/container/ListObjectPool.java
+++ b/asterix-om/src/main/java/edu/uci/ics/asterix/om/util/container/ListObjectPool.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.asterix.runtime.util.container;
+package edu.uci.ics.asterix.om.util.container;
import java.util.ArrayList;
import java.util.BitSet;
diff --git a/asterix-runtime/pom.xml b/asterix-runtime/pom.xml
index 77952a6..ddc1cfd 100644
--- a/asterix-runtime/pom.xml
+++ b/asterix-runtime/pom.xml
@@ -5,11 +5,7 @@
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-runtime</artifactId>
- <version>0.0.4-SNAPSHOT</version>
-
-
<build>
<plugins>
<plugin>
@@ -56,7 +52,6 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.2-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.asterix</groupId>
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/AsterixListAccessor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/AsterixListAccessor.java
index fd6b691..58cbd07 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/AsterixListAccessor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/common/AsterixListAccessor.java
@@ -51,7 +51,7 @@
public void reset(byte[] listBytes, int start) throws AsterixException {
this.listBytes = listBytes;
this.start = start;
- listType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(listBytes[start]);
+ listType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(listBytes[start]);
if (listType != ATypeTag.UNORDEREDLIST && listType != ATypeTag.ORDEREDLIST) {
throw new AsterixException("Unsupported type: " + listType);
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CastRecordDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CastRecordDescriptor.java
index 4c0a201..8bb4d49 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CastRecordDescriptor.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/CastRecordDescriptor.java
@@ -5,12 +5,12 @@
import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.pointables.PointableAllocator;
+import edu.uci.ics.asterix.om.pointables.base.IVisitablePointable;
+import edu.uci.ics.asterix.om.pointables.cast.ACastVisitor;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.om.types.IAType;
import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
-import edu.uci.ics.asterix.runtime.pointables.PointableAllocator;
-import edu.uci.ics.asterix.runtime.pointables.base.IVisitablePointable;
-import edu.uci.ics.asterix.runtime.pointables.cast.ACastVisitor;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.common.utils.Triple;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/NotNullDescriptor.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/NotNullDescriptor.java
new file mode 100644
index 0000000..12e73f6
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/evaluators/functions/NotNullDescriptor.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.runtime.evaluators.functions;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.asterix.om.functions.AsterixBuiltinFunctions;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptor;
+import edu.uci.ics.asterix.om.functions.IFunctionDescriptorFactory;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
+import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
+import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
+import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
+
+/**
+ * This runtime function checks whether the input is null.
+ * If the input is not null, it is returned directly;
+ * otherwise, an exception is raised at runtime.
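+ * For example, not-null(42) passes the value through unchanged, while
+ * not-null(null) fails with an AlgebricksException at evaluation time.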
+ */
+public class NotNullDescriptor extends AbstractScalarFunctionDynamicDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
+ public IFunctionDescriptor createFunctionDescriptor() {
+ return new NotNullDescriptor();
+ }
+ };
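+    // serialized type tag of NULL; the first byte of an evaluated value encodes its type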
+ private final static byte SER_NULL_TYPE_TAG = ATypeTag.NULL.serialize();
+
+ @Override
+ public ICopyEvaluatorFactory createEvaluatorFactory(final ICopyEvaluatorFactory[] args) {
+ return new ICopyEvaluatorFactory() {
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public ICopyEvaluator createEvaluator(final IDataOutputProvider output) throws AlgebricksException {
+ return new ICopyEvaluator() {
+ private DataOutput out = output.getDataOutput();
+ private ArrayBackedValueStorage outInput = new ArrayBackedValueStorage();
+ private ICopyEvaluator eval = args[0].createEvaluator(outInput);
+ private String errorMessage = "The input value cannot be null!";
+
+ @Override
+ public void evaluate(IFrameTupleReference tuple) throws AlgebricksException {
+
+ try {
+ outInput.reset();
+ eval.evaluate(tuple);
+ byte[] data = outInput.getByteArray();
+ if (data[outInput.getStartOffset()] == SER_NULL_TYPE_TAG) {
+ throw new AlgebricksException(errorMessage);
+ }
+ out.write(data, outInput.getStartOffset(), outInput.getLength());
+ } catch (IOException e1) {
+                        throw new AlgebricksException(e1);
+ }
+ }
+ };
+ }
+ };
+ }
+
+ @Override
+ public FunctionIdentifier getIdentifier() {
+ return AsterixBuiltinFunctions.NOT_NULL;
+ }
+
+}
\ No newline at end of file
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
index 92d4294..41ca4ed 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/formats/NonTaggedDataFormat.java
@@ -17,6 +17,7 @@
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryBooleanInspectorImpl;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.asterix.formats.nontagged.AqlBinaryHashFunctionFamilyProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlBinaryIntegerInspector;
import edu.uci.ics.asterix.formats.nontagged.AqlNormalizedKeyComputerFactoryProvider;
import edu.uci.ics.asterix.formats.nontagged.AqlPrinterFactoryProvider;
@@ -124,6 +125,7 @@
import edu.uci.ics.asterix.runtime.evaluators.functions.LenDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.LikeDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NotDescriptor;
+import edu.uci.ics.asterix.runtime.evaluators.functions.NotNullDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NumericAbsDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NumericAddDescriptor;
import edu.uci.ics.asterix.runtime.evaluators.functions.NumericCeilingDescriptor;
@@ -197,6 +199,7 @@
import edu.uci.ics.hyracks.algebricks.data.IBinaryBooleanInspectorFactory;
import edu.uci.ics.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
+import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
import edu.uci.ics.hyracks.algebricks.data.INormalizedKeyComputerFactoryProvider;
import edu.uci.ics.hyracks.algebricks.data.IPrinterFactoryProvider;
@@ -218,564 +221,628 @@
public class NonTaggedDataFormat implements IDataFormat {
- private static boolean registered = false;
+ private static boolean registered = false;
- public static final NonTaggedDataFormat INSTANCE = new NonTaggedDataFormat();
+ public static final NonTaggedDataFormat INSTANCE = new NonTaggedDataFormat();
- private static LogicalVariable METADATA_DUMMY_VAR = new LogicalVariable(-1);
+ private static LogicalVariable METADATA_DUMMY_VAR = new LogicalVariable(-1);
- private static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
+ private static final HashMap<ATypeTag, IValueParserFactory> typeToValueParserFactMap = new HashMap<ATypeTag, IValueParserFactory>();
- public static final String NON_TAGGED_DATA_FORMAT = "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat";
+ public static final String NON_TAGGED_DATA_FORMAT = "edu.uci.ics.asterix.runtime.formats.NonTaggedDataFormat";
- static {
- typeToValueParserFactMap.put(ATypeTag.INT32, IntegerParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.FLOAT, FloatParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.DOUBLE, DoubleParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.INT64, LongParserFactory.INSTANCE);
- typeToValueParserFactMap.put(ATypeTag.STRING, UTF8StringParserFactory.INSTANCE);
- }
+ static {
+ typeToValueParserFactMap.put(ATypeTag.INT32,
+ IntegerParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.FLOAT,
+ FloatParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.DOUBLE,
+ DoubleParserFactory.INSTANCE);
+ typeToValueParserFactMap
+ .put(ATypeTag.INT64, LongParserFactory.INSTANCE);
+ typeToValueParserFactMap.put(ATypeTag.STRING,
+ UTF8StringParserFactory.INSTANCE);
+ }
- public NonTaggedDataFormat() {
- }
+ public NonTaggedDataFormat() {
+ }
- public void registerRuntimeFunctions() throws AlgebricksException {
+ public void registerRuntimeFunctions() throws AlgebricksException {
- if (registered) {
- return;
- }
- registered = true;
+ if (registered) {
+ return;
+ }
+ registered = true;
- if (FunctionManagerHolder.getFunctionManager() != null) {
- return;
- }
+ if (FunctionManagerHolder.getFunctionManager() != null) {
+ return;
+ }
- List<IFunctionDescriptorFactory> temp = new ArrayList<IFunctionDescriptorFactory>();
+ List<IFunctionDescriptorFactory> temp = new ArrayList<IFunctionDescriptorFactory>();
- // format-independent
- temp.add(ContainsDescriptor.FACTORY);
- temp.add(EndsWithDescriptor.FACTORY);
- temp.add(StartsWithDescriptor.FACTORY);
- temp.add(SubstringDescriptor.FACTORY);
- temp.add(TidRunningAggregateDescriptor.FACTORY);
+ // format-independent
+ temp.add(ContainsDescriptor.FACTORY);
+ temp.add(EndsWithDescriptor.FACTORY);
+ temp.add(StartsWithDescriptor.FACTORY);
+ temp.add(SubstringDescriptor.FACTORY);
+ temp.add(TidRunningAggregateDescriptor.FACTORY);
- // format-dependent
- temp.add(AndDescriptor.FACTORY);
- temp.add(OrDescriptor.FACTORY);
- temp.add(LikeDescriptor.FACTORY);
- temp.add(YearDescriptor.FACTORY);
- temp.add(ScanCollectionDescriptor.FACTORY);
- temp.add(AnyCollectionMemberDescriptor.FACTORY);
- temp.add(ClosedRecordConstructorDescriptor.FACTORY);
- temp.add(FieldAccessByIndexDescriptor.FACTORY);
- temp.add(FieldAccessByNameDescriptor.FACTORY);
- temp.add(GetItemDescriptor.FACTORY);
- temp.add(NumericUnaryMinusDescriptor.FACTORY);
- temp.add(OpenRecordConstructorDescriptor.FACTORY);
- temp.add(OrderedListConstructorDescriptor.FACTORY);
- temp.add(UnorderedListConstructorDescriptor.FACTORY);
- temp.add(EmbedTypeDescriptor.FACTORY);
+ // format-dependent
+ temp.add(AndDescriptor.FACTORY);
+ temp.add(OrDescriptor.FACTORY);
+ temp.add(LikeDescriptor.FACTORY);
+ temp.add(YearDescriptor.FACTORY);
+ temp.add(ScanCollectionDescriptor.FACTORY);
+ temp.add(AnyCollectionMemberDescriptor.FACTORY);
+ temp.add(ClosedRecordConstructorDescriptor.FACTORY);
+ temp.add(FieldAccessByIndexDescriptor.FACTORY);
+ temp.add(FieldAccessByNameDescriptor.FACTORY);
+ temp.add(GetItemDescriptor.FACTORY);
+ temp.add(NumericUnaryMinusDescriptor.FACTORY);
+ temp.add(OpenRecordConstructorDescriptor.FACTORY);
+ temp.add(OrderedListConstructorDescriptor.FACTORY);
+ temp.add(UnorderedListConstructorDescriptor.FACTORY);
+ temp.add(EmbedTypeDescriptor.FACTORY);
- temp.add(NumericAddDescriptor.FACTORY);
- temp.add(NumericDivideDescriptor.FACTORY);
- temp.add(NumericMultiplyDescriptor.FACTORY);
- temp.add(NumericSubtractDescriptor.FACTORY);
- temp.add(NumericModuloDescriptor.FACTORY);
- temp.add(IsNullDescriptor.FACTORY);
- temp.add(NotDescriptor.FACTORY);
- temp.add(LenDescriptor.FACTORY);
- temp.add(EmptyStreamAggregateDescriptor.FACTORY);
- temp.add(NonEmptyStreamAggregateDescriptor.FACTORY);
- temp.add(RangeDescriptor.FACTORY);
+ temp.add(NumericAddDescriptor.FACTORY);
+ temp.add(NumericDivideDescriptor.FACTORY);
+ temp.add(NumericMultiplyDescriptor.FACTORY);
+ temp.add(NumericSubtractDescriptor.FACTORY);
+ temp.add(NumericModuloDescriptor.FACTORY);
+ temp.add(IsNullDescriptor.FACTORY);
+ temp.add(NotDescriptor.FACTORY);
+ temp.add(LenDescriptor.FACTORY);
+ temp.add(EmptyStreamAggregateDescriptor.FACTORY);
+ temp.add(NonEmptyStreamAggregateDescriptor.FACTORY);
+ temp.add(RangeDescriptor.FACTORY);
- temp.add(NumericAbsDescriptor.FACTORY);
- temp.add(NumericCeilingDescriptor.FACTORY);
- temp.add(NumericFloorDescriptor.FACTORY);
- temp.add(NumericRoundDescriptor.FACTORY);
- temp.add(NumericRoundHalfToEvenDescriptor.FACTORY);
- temp.add(NumericRoundHalfToEven2Descriptor.FACTORY);
- // String functions
- temp.add(StringEqualDescriptor.FACTORY);
- temp.add(StringStartWithDescrtiptor.FACTORY);
- temp.add(StringEndWithDescrtiptor.FACTORY);
- temp.add(StringMatchesDescriptor.FACTORY);
- temp.add(StringLowerCaseDescriptor.FACTORY);
- temp.add(StringMatchesWithFlagDescriptor.FACTORY);
- temp.add(StringReplaceDescriptor.FACTORY);
- temp.add(StringReplaceWithFlagsDescriptor.FACTORY);
- temp.add(StringLengthDescriptor.FACTORY);
- temp.add(Substring2Descriptor.FACTORY);
- temp.add(SubstringBeforeDescriptor.FACTORY);
- temp.add(SubstringAfterDescriptor.FACTORY);
- temp.add(StringToCodePointDescriptor.FACTORY);
- temp.add(CodePointToStringDescriptor.FACTORY);
- temp.add(StringConcatDescriptor.FACTORY);
- temp.add(StringJoinDescriptor.FACTORY);
+ temp.add(NumericAbsDescriptor.FACTORY);
+ temp.add(NumericCeilingDescriptor.FACTORY);
+ temp.add(NumericFloorDescriptor.FACTORY);
+ temp.add(NumericRoundDescriptor.FACTORY);
+ temp.add(NumericRoundHalfToEvenDescriptor.FACTORY);
+ temp.add(NumericRoundHalfToEven2Descriptor.FACTORY);
+ // String functions
+ temp.add(StringEqualDescriptor.FACTORY);
+ temp.add(StringStartWithDescrtiptor.FACTORY);
+ temp.add(StringEndWithDescrtiptor.FACTORY);
+ temp.add(StringMatchesDescriptor.FACTORY);
+ temp.add(StringLowerCaseDescriptor.FACTORY);
+ temp.add(StringMatchesWithFlagDescriptor.FACTORY);
+ temp.add(StringReplaceDescriptor.FACTORY);
+ temp.add(StringReplaceWithFlagsDescriptor.FACTORY);
+ temp.add(StringLengthDescriptor.FACTORY);
+ temp.add(Substring2Descriptor.FACTORY);
+ temp.add(SubstringBeforeDescriptor.FACTORY);
+ temp.add(SubstringAfterDescriptor.FACTORY);
+ temp.add(StringToCodePointDescriptor.FACTORY);
+ temp.add(CodePointToStringDescriptor.FACTORY);
+ temp.add(StringConcatDescriptor.FACTORY);
+ temp.add(StringJoinDescriptor.FACTORY);
- // aggregates
- temp.add(ListifyAggregateDescriptor.FACTORY);
- temp.add(CountAggregateDescriptor.FACTORY);
- temp.add(AvgAggregateDescriptor.FACTORY);
- temp.add(LocalAvgAggregateDescriptor.FACTORY);
- temp.add(GlobalAvgAggregateDescriptor.FACTORY);
- temp.add(SumAggregateDescriptor.FACTORY);
- temp.add(LocalSumAggregateDescriptor.FACTORY);
- temp.add(MaxAggregateDescriptor.FACTORY);
- temp.add(LocalMaxAggregateDescriptor.FACTORY);
- temp.add(MinAggregateDescriptor.FACTORY);
- temp.add(LocalMinAggregateDescriptor.FACTORY);
+ // aggregates
+ temp.add(ListifyAggregateDescriptor.FACTORY);
+ temp.add(CountAggregateDescriptor.FACTORY);
+ temp.add(AvgAggregateDescriptor.FACTORY);
+ temp.add(LocalAvgAggregateDescriptor.FACTORY);
+ temp.add(GlobalAvgAggregateDescriptor.FACTORY);
+ temp.add(SumAggregateDescriptor.FACTORY);
+ temp.add(LocalSumAggregateDescriptor.FACTORY);
+ temp.add(MaxAggregateDescriptor.FACTORY);
+ temp.add(LocalMaxAggregateDescriptor.FACTORY);
+ temp.add(MinAggregateDescriptor.FACTORY);
+ temp.add(LocalMinAggregateDescriptor.FACTORY);
- // serializable aggregates
- temp.add(SerializableCountAggregateDescriptor.FACTORY);
- temp.add(SerializableAvgAggregateDescriptor.FACTORY);
- temp.add(SerializableLocalAvgAggregateDescriptor.FACTORY);
- temp.add(SerializableGlobalAvgAggregateDescriptor.FACTORY);
- temp.add(SerializableSumAggregateDescriptor.FACTORY);
- temp.add(SerializableLocalSumAggregateDescriptor.FACTORY);
+ // serializable aggregates
+ temp.add(SerializableCountAggregateDescriptor.FACTORY);
+ temp.add(SerializableAvgAggregateDescriptor.FACTORY);
+ temp.add(SerializableLocalAvgAggregateDescriptor.FACTORY);
+ temp.add(SerializableGlobalAvgAggregateDescriptor.FACTORY);
+ temp.add(SerializableSumAggregateDescriptor.FACTORY);
+ temp.add(SerializableLocalSumAggregateDescriptor.FACTORY);
- // scalar aggregates
- temp.add(ScalarCountAggregateDescriptor.FACTORY);
- temp.add(ScalarAvgAggregateDescriptor.FACTORY);
- temp.add(ScalarSumAggregateDescriptor.FACTORY);
- temp.add(ScalarMaxAggregateDescriptor.FACTORY);
- temp.add(ScalarMinAggregateDescriptor.FACTORY);
+ // scalar aggregates
+ temp.add(ScalarCountAggregateDescriptor.FACTORY);
+ temp.add(ScalarAvgAggregateDescriptor.FACTORY);
+ temp.add(ScalarSumAggregateDescriptor.FACTORY);
+ temp.add(ScalarMaxAggregateDescriptor.FACTORY);
+ temp.add(ScalarMinAggregateDescriptor.FACTORY);
- // new functions - constructors
- temp.add(ABooleanConstructorDescriptor.FACTORY);
- temp.add(ANullConstructorDescriptor.FACTORY);
- temp.add(AStringConstructorDescriptor.FACTORY);
- temp.add(AInt8ConstructorDescriptor.FACTORY);
- temp.add(AInt16ConstructorDescriptor.FACTORY);
- temp.add(AInt32ConstructorDescriptor.FACTORY);
- temp.add(AInt64ConstructorDescriptor.FACTORY);
- temp.add(AFloatConstructorDescriptor.FACTORY);
- temp.add(ADoubleConstructorDescriptor.FACTORY);
- temp.add(APointConstructorDescriptor.FACTORY);
- temp.add(APoint3DConstructorDescriptor.FACTORY);
- temp.add(ALineConstructorDescriptor.FACTORY);
- temp.add(APolygonConstructorDescriptor.FACTORY);
- temp.add(ACircleConstructorDescriptor.FACTORY);
- temp.add(ARectangleConstructorDescriptor.FACTORY);
- temp.add(ATimeConstructorDescriptor.FACTORY);
- temp.add(ADateConstructorDescriptor.FACTORY);
- temp.add(ADateTimeConstructorDescriptor.FACTORY);
- temp.add(ADurationConstructorDescriptor.FACTORY);
+ // new functions - constructors
+ temp.add(ABooleanConstructorDescriptor.FACTORY);
+ temp.add(ANullConstructorDescriptor.FACTORY);
+ temp.add(AStringConstructorDescriptor.FACTORY);
+ temp.add(AInt8ConstructorDescriptor.FACTORY);
+ temp.add(AInt16ConstructorDescriptor.FACTORY);
+ temp.add(AInt32ConstructorDescriptor.FACTORY);
+ temp.add(AInt64ConstructorDescriptor.FACTORY);
+ temp.add(AFloatConstructorDescriptor.FACTORY);
+ temp.add(ADoubleConstructorDescriptor.FACTORY);
+ temp.add(APointConstructorDescriptor.FACTORY);
+ temp.add(APoint3DConstructorDescriptor.FACTORY);
+ temp.add(ALineConstructorDescriptor.FACTORY);
+ temp.add(APolygonConstructorDescriptor.FACTORY);
+ temp.add(ACircleConstructorDescriptor.FACTORY);
+ temp.add(ARectangleConstructorDescriptor.FACTORY);
+ temp.add(ATimeConstructorDescriptor.FACTORY);
+ temp.add(ADateConstructorDescriptor.FACTORY);
+ temp.add(ADateTimeConstructorDescriptor.FACTORY);
+ temp.add(ADurationConstructorDescriptor.FACTORY);
- // Spatial
- temp.add(CreatePointDescriptor.FACTORY);
- temp.add(CreateLineDescriptor.FACTORY);
- temp.add(CreatePolygonDescriptor.FACTORY);
- temp.add(CreateCircleDescriptor.FACTORY);
- temp.add(CreateRectangleDescriptor.FACTORY);
- temp.add(SpatialAreaDescriptor.FACTORY);
- temp.add(SpatialDistanceDescriptor.FACTORY);
- temp.add(SpatialIntersectDescriptor.FACTORY);
- temp.add(CreateMBRDescriptor.FACTORY);
- temp.add(SpatialCellDescriptor.FACTORY);
- temp.add(PointXCoordinateAccessor.FACTORY);
- temp.add(PointYCoordinateAccessor.FACTORY);
- temp.add(CircleRadiusAccessor.FACTORY);
- temp.add(CircleCenterAccessor.FACTORY);
- temp.add(LineRectanglePolygonAccessor.FACTORY);
+ // Spatial
+ temp.add(CreatePointDescriptor.FACTORY);
+ temp.add(CreateLineDescriptor.FACTORY);
+ temp.add(CreatePolygonDescriptor.FACTORY);
+ temp.add(CreateCircleDescriptor.FACTORY);
+ temp.add(CreateRectangleDescriptor.FACTORY);
+ temp.add(SpatialAreaDescriptor.FACTORY);
+ temp.add(SpatialDistanceDescriptor.FACTORY);
+ temp.add(SpatialIntersectDescriptor.FACTORY);
+ temp.add(CreateMBRDescriptor.FACTORY);
+ temp.add(SpatialCellDescriptor.FACTORY);
+ temp.add(PointXCoordinateAccessor.FACTORY);
+ temp.add(PointYCoordinateAccessor.FACTORY);
+ temp.add(CircleRadiusAccessor.FACTORY);
+ temp.add(CircleCenterAccessor.FACTORY);
+ temp.add(LineRectanglePolygonAccessor.FACTORY);
- // fuzzyjoin function
- temp.add(FuzzyEqDescriptor.FACTORY);
- temp.add(SubsetCollectionDescriptor.FACTORY);
- temp.add(PrefixLenJaccardDescriptor.FACTORY);
+ // fuzzyjoin function
+ temp.add(FuzzyEqDescriptor.FACTORY);
+ temp.add(SubsetCollectionDescriptor.FACTORY);
+ temp.add(PrefixLenJaccardDescriptor.FACTORY);
- temp.add(WordTokensDescriptor.FACTORY);
- temp.add(HashedWordTokensDescriptor.FACTORY);
- temp.add(CountHashedWordTokensDescriptor.FACTORY);
+ temp.add(WordTokensDescriptor.FACTORY);
+ temp.add(HashedWordTokensDescriptor.FACTORY);
+ temp.add(CountHashedWordTokensDescriptor.FACTORY);
- temp.add(GramTokensDescriptor.FACTORY);
- temp.add(HashedGramTokensDescriptor.FACTORY);
- temp.add(CountHashedGramTokensDescriptor.FACTORY);
+ temp.add(GramTokensDescriptor.FACTORY);
+ temp.add(HashedGramTokensDescriptor.FACTORY);
+ temp.add(CountHashedGramTokensDescriptor.FACTORY);
- temp.add(EditDistanceDescriptor.FACTORY);
- temp.add(EditDistanceCheckDescriptor.FACTORY);
- temp.add(EditDistanceStringIsFilterable.FACTORY);
- temp.add(EditDistanceListIsFilterable.FACTORY);
+ temp.add(EditDistanceDescriptor.FACTORY);
+ temp.add(EditDistanceCheckDescriptor.FACTORY);
+ temp.add(EditDistanceStringIsFilterable.FACTORY);
+ temp.add(EditDistanceListIsFilterable.FACTORY);
- temp.add(SimilarityJaccardDescriptor.FACTORY);
- temp.add(SimilarityJaccardCheckDescriptor.FACTORY);
- temp.add(SimilarityJaccardSortedDescriptor.FACTORY);
- temp.add(SimilarityJaccardSortedCheckDescriptor.FACTORY);
- temp.add(SimilarityJaccardPrefixDescriptor.FACTORY);
- temp.add(SimilarityJaccardPrefixCheckDescriptor.FACTORY);
+ temp.add(SimilarityJaccardDescriptor.FACTORY);
+ temp.add(SimilarityJaccardCheckDescriptor.FACTORY);
+ temp.add(SimilarityJaccardSortedDescriptor.FACTORY);
+ temp.add(SimilarityJaccardSortedCheckDescriptor.FACTORY);
+ temp.add(SimilarityJaccardPrefixDescriptor.FACTORY);
+ temp.add(SimilarityJaccardPrefixCheckDescriptor.FACTORY);
- temp.add(SwitchCaseDescriptor.FACTORY);
- temp.add(RegExpDescriptor.FACTORY);
- temp.add(InjectFailureDescriptor.FACTORY);
- temp.add(CastRecordDescriptor.FACTORY);
+ temp.add(SwitchCaseDescriptor.FACTORY);
+ temp.add(RegExpDescriptor.FACTORY);
+ temp.add(InjectFailureDescriptor.FACTORY);
+ temp.add(CastRecordDescriptor.FACTORY);
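+            // not-null: passes non-null input through and fails on null at runtime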
+ temp.add(NotNullDescriptor.FACTORY);
- IFunctionManager mgr = new FunctionManagerImpl();
- for (IFunctionDescriptorFactory fdFactory : temp) {
- mgr.registerFunction(fdFactory);
- }
- FunctionManagerHolder.setFunctionManager(mgr);
- }
+ IFunctionManager mgr = new FunctionManagerImpl();
+ for (IFunctionDescriptorFactory fdFactory : temp) {
+ mgr.registerFunction(fdFactory);
+ }
+ FunctionManagerHolder.setFunctionManager(mgr);
+ }
- @Override
- public IBinaryBooleanInspectorFactory getBinaryBooleanInspectorFactory() {
- return AqlBinaryBooleanInspectorImpl.FACTORY;
- }
+ @Override
+ public IBinaryBooleanInspectorFactory getBinaryBooleanInspectorFactory() {
+ return AqlBinaryBooleanInspectorImpl.FACTORY;
+ }
- @Override
- public IBinaryComparatorFactoryProvider getBinaryComparatorFactoryProvider() {
- return AqlBinaryComparatorFactoryProvider.INSTANCE;
- }
+ @Override
+ public IBinaryComparatorFactoryProvider getBinaryComparatorFactoryProvider() {
+ return AqlBinaryComparatorFactoryProvider.INSTANCE;
+ }
- @Override
- public IBinaryHashFunctionFactoryProvider getBinaryHashFunctionFactoryProvider() {
- return AqlBinaryHashFunctionFactoryProvider.INSTANCE;
- }
+ @Override
+ public IBinaryHashFunctionFactoryProvider getBinaryHashFunctionFactoryProvider() {
+ return AqlBinaryHashFunctionFactoryProvider.INSTANCE;
+ }
- @Override
- public ISerializerDeserializerProvider getSerdeProvider() {
- return AqlSerializerDeserializerProvider.INSTANCE; // done
- }
+ @Override
+ public ISerializerDeserializerProvider getSerdeProvider() {
+ return AqlSerializerDeserializerProvider.INSTANCE; // done
+ }
- @Override
- public ITypeTraitProvider getTypeTraitProvider() {
- return AqlTypeTraitProvider.INSTANCE;
- }
+ @Override
+ public ITypeTraitProvider getTypeTraitProvider() {
+ return AqlTypeTraitProvider.INSTANCE;
+ }
- @SuppressWarnings("unchecked")
- @Override
- public ICopyEvaluatorFactory getFieldAccessEvaluatorFactory(ARecordType recType, String fldName, int recordColumn)
- throws AlgebricksException {
- String[] names = recType.getFieldNames();
- int n = names.length;
- for (int i = 0; i < n; i++) {
- if (names[i].equals(fldName)) {
- ICopyEvaluatorFactory recordEvalFactory = new ColumnAccessEvalFactory(recordColumn);
- ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
- DataOutput dos = abvs.getDataOutput();
- try {
- AInt32 ai = new AInt32(i);
- AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai,
- dos);
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- ICopyEvaluatorFactory fldIndexEvalFactory = new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(),
- abvs.getLength()));
- ICopyEvaluatorFactory evalFactory = new FieldAccessByIndexEvalFactory(recordEvalFactory,
- fldIndexEvalFactory, recType);
- return evalFactory;
- }
- }
- throw new AlgebricksException("Could not find field " + fldName + " in the schema.");
- }
+ @SuppressWarnings("unchecked")
+ @Override
+ public ICopyEvaluatorFactory getFieldAccessEvaluatorFactory(
+ ARecordType recType, String fldName, int recordColumn)
+ throws AlgebricksException {
+ String[] names = recType.getFieldNames();
+ int n = names.length;
+ for (int i = 0; i < n; i++) {
+ if (names[i].equals(fldName)) {
+ ICopyEvaluatorFactory recordEvalFactory = new ColumnAccessEvalFactory(
+ recordColumn);
+ ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
+ DataOutput dos = abvs.getDataOutput();
+ try {
+ AInt32 ai = new AInt32(i);
+ AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(ai.getType()).serialize(
+ ai, dos);
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ ICopyEvaluatorFactory fldIndexEvalFactory = new ConstantEvalFactory(
+ Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
+ ICopyEvaluatorFactory evalFactory = new FieldAccessByIndexEvalFactory(
+ recordEvalFactory, fldIndexEvalFactory, recType);
+ return evalFactory;
+ }
+ }
+ throw new AlgebricksException("Could not find field " + fldName
+ + " in the schema.");
+ }
- @SuppressWarnings("unchecked")
- @Override
- public ICopyEvaluatorFactory[] createMBRFactory(ARecordType recType, String fldName, int recordColumn, int dimension)
- throws AlgebricksException {
- ICopyEvaluatorFactory evalFactory = getFieldAccessEvaluatorFactory(recType, fldName, recordColumn);
- int numOfFields = dimension * 2;
- ICopyEvaluatorFactory[] evalFactories = new ICopyEvaluatorFactory[numOfFields];
+ @SuppressWarnings("unchecked")
+ @Override
+ public ICopyEvaluatorFactory[] createMBRFactory(ARecordType recType,
+ String fldName, int recordColumn, int dimension)
+ throws AlgebricksException {
+ ICopyEvaluatorFactory evalFactory = getFieldAccessEvaluatorFactory(
+ recType, fldName, recordColumn);
+ int numOfFields = dimension * 2;
+ ICopyEvaluatorFactory[] evalFactories = new ICopyEvaluatorFactory[numOfFields];
- ArrayBackedValueStorage abvs1 = new ArrayBackedValueStorage();
- DataOutput dos1 = abvs1.getDataOutput();
- try {
- AInt32 ai = new AInt32(dimension);
- AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai, dos1);
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- ICopyEvaluatorFactory dimensionEvalFactory = new ConstantEvalFactory(Arrays.copyOf(abvs1.getByteArray(),
- abvs1.getLength()));
+ ArrayBackedValueStorage abvs1 = new ArrayBackedValueStorage();
+ DataOutput dos1 = abvs1.getDataOutput();
+ try {
+ AInt32 ai = new AInt32(dimension);
+ AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(ai.getType())
+ .serialize(ai, dos1);
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ ICopyEvaluatorFactory dimensionEvalFactory = new ConstantEvalFactory(
+ Arrays.copyOf(abvs1.getByteArray(), abvs1.getLength()));
- for (int i = 0; i < numOfFields; i++) {
- ArrayBackedValueStorage abvs2 = new ArrayBackedValueStorage();
- DataOutput dos2 = abvs2.getDataOutput();
- try {
- AInt32 ai = new AInt32(i);
- AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai, dos2);
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- ICopyEvaluatorFactory coordinateEvalFactory = new ConstantEvalFactory(Arrays.copyOf(abvs2.getByteArray(),
- abvs2.getLength()));
+ for (int i = 0; i < numOfFields; i++) {
+ ArrayBackedValueStorage abvs2 = new ArrayBackedValueStorage();
+ DataOutput dos2 = abvs2.getDataOutput();
+ try {
+ AInt32 ai = new AInt32(i);
+ AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(ai.getType()).serialize(ai,
+ dos2);
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ ICopyEvaluatorFactory coordinateEvalFactory = new ConstantEvalFactory(
+ Arrays.copyOf(abvs2.getByteArray(), abvs2.getLength()));
- evalFactories[i] = new CreateMBREvalFactory(evalFactory, dimensionEvalFactory, coordinateEvalFactory);
- }
- return evalFactories;
- }
+ evalFactories[i] = new CreateMBREvalFactory(evalFactory,
+ dimensionEvalFactory, coordinateEvalFactory);
+ }
+ return evalFactories;
+ }
- @SuppressWarnings("unchecked")
- @Override
- public Triple<ICopyEvaluatorFactory, ScalarFunctionCallExpression, IAType> partitioningEvaluatorFactory(
- ARecordType recType, String fldName) throws AlgebricksException {
- String[] names = recType.getFieldNames();
- int n = names.length;
- for (int i = 0; i < n; i++) {
- if (names[i].equals(fldName)) {
- ICopyEvaluatorFactory recordEvalFactory = new ColumnAccessEvalFactory(
- GlobalConfig.DEFAULT_INPUT_DATA_COLUMN);
- ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
- DataOutput dos = abvs.getDataOutput();
- try {
- AInt32 ai = new AInt32(i);
- AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(ai.getType()).serialize(ai,
- dos);
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- ICopyEvaluatorFactory fldIndexEvalFactory = new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(),
- abvs.getLength()));
- ICopyEvaluatorFactory evalFactory = new FieldAccessByIndexEvalFactory(recordEvalFactory,
- fldIndexEvalFactory, recType);
- IFunctionInfo finfoAccess = AsterixBuiltinFunctions
- .getAsterixFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX);
+ @SuppressWarnings("unchecked")
+ @Override
+ public Triple<ICopyEvaluatorFactory, ScalarFunctionCallExpression, IAType> partitioningEvaluatorFactory(
+ ARecordType recType, String fldName) throws AlgebricksException {
+ String[] names = recType.getFieldNames();
+ int n = names.length;
+ for (int i = 0; i < n; i++) {
+ if (names[i].equals(fldName)) {
+ ICopyEvaluatorFactory recordEvalFactory = new ColumnAccessEvalFactory(
+ GlobalConfig.DEFAULT_INPUT_DATA_COLUMN);
+ ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
+ DataOutput dos = abvs.getDataOutput();
+ try {
+ AInt32 ai = new AInt32(i);
+ AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(ai.getType()).serialize(
+ ai, dos);
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ ICopyEvaluatorFactory fldIndexEvalFactory = new ConstantEvalFactory(
+ Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
+ ICopyEvaluatorFactory evalFactory = new FieldAccessByIndexEvalFactory(
+ recordEvalFactory, fldIndexEvalFactory, recType);
+ IFunctionInfo finfoAccess = AsterixBuiltinFunctions
+ .getAsterixFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX);
- ScalarFunctionCallExpression partitionFun = new ScalarFunctionCallExpression(finfoAccess,
- new MutableObject<ILogicalExpression>(new VariableReferenceExpression(METADATA_DUMMY_VAR)),
- new MutableObject<ILogicalExpression>(new ConstantExpression(new AsterixConstantValue(
- new AInt32(i)))));
- return new Triple<ICopyEvaluatorFactory, ScalarFunctionCallExpression, IAType>(evalFactory,
- partitionFun, recType.getFieldTypes()[i]);
- }
- }
- throw new AlgebricksException("Could not find field " + fldName + " in the schema.");
- }
+ ScalarFunctionCallExpression partitionFun = new ScalarFunctionCallExpression(
+ finfoAccess,
+ new MutableObject<ILogicalExpression>(
+ new VariableReferenceExpression(
+ METADATA_DUMMY_VAR)),
+ new MutableObject<ILogicalExpression>(
+ new ConstantExpression(
+ new AsterixConstantValue(new AInt32(i)))));
+ return new Triple<ICopyEvaluatorFactory, ScalarFunctionCallExpression, IAType>(
+ evalFactory, partitionFun, recType.getFieldTypes()[i]);
+ }
+ }
+ throw new AlgebricksException("Could not find field " + fldName
+ + " in the schema.");
+ }
- @Override
- public IFunctionDescriptor resolveFunction(ILogicalExpression expr, IVariableTypeEnvironment context)
- throws AlgebricksException {
- FunctionIdentifier fnId = ((AbstractFunctionCallExpression) expr).getFunctionIdentifier();
- IFunctionManager mgr = FunctionManagerHolder.getFunctionManager();
- IFunctionDescriptor fd = mgr.lookupFunction(fnId);
- if (fd == null) {
- throw new AsterixRuntimeException("Unresolved function " + fnId);
- }
- typeInference(expr, fd, context);
- return fd;
- }
+ @Override
+ public IFunctionDescriptor resolveFunction(ILogicalExpression expr,
+ IVariableTypeEnvironment context) throws AlgebricksException {
+ FunctionIdentifier fnId = ((AbstractFunctionCallExpression) expr)
+ .getFunctionIdentifier();
+ IFunctionManager mgr = FunctionManagerHolder.getFunctionManager();
+ IFunctionDescriptor fd = mgr.lookupFunction(fnId);
+ if (fd == null) {
+ throw new AsterixRuntimeException("Unresolved function " + fnId);
+ }
+ typeInference(expr, fd, context);
+ return fd;
+ }
- private void typeInference(ILogicalExpression expr, IFunctionDescriptor fd, IVariableTypeEnvironment context)
- throws AlgebricksException {
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.LISTIFY)) {
- AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
- if (f.getArguments().size() == 0) {
- ((ListifyAggregateDescriptor) fd).reset(new AOrderedListType(null, null));
- } else {
- IAType itemType = (IAType) context.getType(f.getArguments().get(0).getValue());
- // Convert UNION types into ANY.
- if (itemType instanceof AUnionType) {
- itemType = BuiltinType.ANY;
- }
- ((ListifyAggregateDescriptor) fd).reset(new AOrderedListType(itemType, null));
- }
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.CAST_RECORD)) {
- ARecordType rt = (ARecordType) TypeComputerUtilities.getRequiredType((AbstractFunctionCallExpression) expr);
- ARecordType it = (ARecordType) TypeComputerUtilities.getInputType((AbstractFunctionCallExpression) expr);
- ((CastRecordDescriptor) fd).reset(rt, it);
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)) {
- ARecordType rt = (ARecordType) context.getType(expr);
- ((OpenRecordConstructorDescriptor) fd).reset(rt,
- computeOpenFields((AbstractFunctionCallExpression) expr, rt));
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
- ((ClosedRecordConstructorDescriptor) fd).reset((ARecordType) context.getType(expr));
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR)) {
- ((OrderedListConstructorDescriptor) fd).reset((AOrderedListType) context.getType(expr));
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR)) {
- ((UnorderedListConstructorDescriptor) fd).reset((AUnorderedListType) context.getType(expr));
- }
- if (fd.getIdentifier().equals(AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX)) {
- AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
- IAType t = (IAType) context.getType(fce.getArguments().get(0).getValue());
- switch (t.getTypeTag()) {
- case RECORD: {
- ARecordType recType = (ARecordType) t;
- ((FieldAccessByIndexDescriptor) fd).reset(recType);
- break;
- }
- case UNION: {
- AUnionType unionT = (AUnionType) t;
- if (unionT.isNullableType()) {
- IAType t2 = unionT.getUnionList().get(1);
- if (t2.getTypeTag() == ATypeTag.RECORD) {
- ARecordType recType = (ARecordType) t2;
- ((FieldAccessByIndexDescriptor) fd).reset(recType);
- break;
- }
- }
- throw new NotImplementedException("field-access-by-index for data of type " + t);
- }
- default: {
- throw new NotImplementedException("field-access-by-index for data of type " + t);
- }
- }
- }
- }
+ private void typeInference(ILogicalExpression expr, IFunctionDescriptor fd,
+ IVariableTypeEnvironment context) throws AlgebricksException {
+ if (fd.getIdentifier().equals(AsterixBuiltinFunctions.LISTIFY)) {
+ AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
+ if (f.getArguments().size() == 0) {
+ ((ListifyAggregateDescriptor) fd).reset(new AOrderedListType(
+ null, null));
+ } else {
+ IAType itemType = (IAType) context.getType(f.getArguments()
+ .get(0).getValue());
+ // Convert UNION types into ANY.
+ if (itemType instanceof AUnionType) {
+ itemType = BuiltinType.ANY;
+ }
+ ((ListifyAggregateDescriptor) fd).reset(new AOrderedListType(
+ itemType, null));
+ }
+ }
+ if (fd.getIdentifier().equals(AsterixBuiltinFunctions.CAST_RECORD)) {
+ ARecordType rt = (ARecordType) TypeComputerUtilities
+ .getRequiredType((AbstractFunctionCallExpression) expr);
+ ARecordType it = (ARecordType) TypeComputerUtilities
+ .getInputType((AbstractFunctionCallExpression) expr);
+ ((CastRecordDescriptor) fd).reset(rt, it);
+ }
+ if (fd.getIdentifier().equals(
+ AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR)) {
+ ARecordType rt = (ARecordType) context.getType(expr);
+ ((OpenRecordConstructorDescriptor) fd)
+ .reset(rt,
+ computeOpenFields(
+ (AbstractFunctionCallExpression) expr, rt));
+ }
+ if (fd.getIdentifier().equals(
+ AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
+ ((ClosedRecordConstructorDescriptor) fd)
+ .reset((ARecordType) context.getType(expr));
+ }
+ if (fd.getIdentifier().equals(
+ AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR)) {
+ ((OrderedListConstructorDescriptor) fd)
+ .reset((AOrderedListType) context.getType(expr));
+ }
+ if (fd.getIdentifier().equals(
+ AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR)) {
+ ((UnorderedListConstructorDescriptor) fd)
+ .reset((AUnorderedListType) context.getType(expr));
+ }
+ if (fd.getIdentifier().equals(
+ AsterixBuiltinFunctions.FIELD_ACCESS_BY_INDEX)) {
+ AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
+ IAType t = (IAType) context.getType(fce.getArguments().get(0)
+ .getValue());
+ switch (t.getTypeTag()) {
+ case RECORD: {
+ ARecordType recType = (ARecordType) t;
+ ((FieldAccessByIndexDescriptor) fd).reset(recType);
+ break;
+ }
+ case UNION: {
+ AUnionType unionT = (AUnionType) t;
+ if (unionT.isNullableType()) {
+ IAType t2 = unionT.getUnionList().get(1);
+ if (t2.getTypeTag() == ATypeTag.RECORD) {
+ ARecordType recType = (ARecordType) t2;
+ ((FieldAccessByIndexDescriptor) fd).reset(recType);
+ break;
+ }
+ }
+ throw new NotImplementedException(
+ "field-access-by-index for data of type " + t);
+ }
+ default: {
+ throw new NotImplementedException(
+ "field-access-by-index for data of type " + t);
+ }
+ }
+ }
+ }
- private boolean[] computeOpenFields(AbstractFunctionCallExpression expr, ARecordType recType) {
- int n = expr.getArguments().size() / 2;
- boolean[] open = new boolean[n];
- for (int i = 0; i < n; i++) {
- Mutable<ILogicalExpression> argRef = expr.getArguments().get(2 * i);
- ILogicalExpression arg = argRef.getValue();
- if (arg.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
- String fn = ((AString) ((AsterixConstantValue) ((ConstantExpression) arg).getValue()).getObject())
- .getStringValue();
- open[i] = true;
- for (String s : recType.getFieldNames()) {
- if (s.equals(fn)) {
- open[i] = false;
- break;
- }
- }
- } else {
- open[i] = true;
- }
- }
- return open;
- }
+ private boolean[] computeOpenFields(AbstractFunctionCallExpression expr,
+ ARecordType recType) {
+ int n = expr.getArguments().size() / 2;
+ boolean[] open = new boolean[n];
+ for (int i = 0; i < n; i++) {
+ Mutable<ILogicalExpression> argRef = expr.getArguments().get(2 * i);
+ ILogicalExpression arg = argRef.getValue();
+ if (arg.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
+ String fn = ((AString) ((AsterixConstantValue) ((ConstantExpression) arg)
+ .getValue()).getObject()).getStringValue();
+ open[i] = true;
+ for (String s : recType.getFieldNames()) {
+ if (s.equals(fn)) {
+ open[i] = false;
+ break;
+ }
+ }
+ } else {
+ open[i] = true;
+ }
+ }
+ return open;
+ }
- @Override
- public IPrinterFactoryProvider getPrinterFactoryProvider() {
- return AqlPrinterFactoryProvider.INSTANCE;
- }
+ @Override
+ public IPrinterFactoryProvider getPrinterFactoryProvider() {
+ return AqlPrinterFactoryProvider.INSTANCE;
+ }
- @SuppressWarnings("unchecked")
- @Override
- public ICopyEvaluatorFactory getConstantEvalFactory(IAlgebricksConstantValue value) throws AlgebricksException {
- IAObject obj = null;
- if (value.isNull()) {
- obj = ANull.NULL;
- } else if (value.isTrue()) {
- obj = ABoolean.TRUE;
- } else if (value.isFalse()) {
- obj = ABoolean.FALSE;
- } else {
- AsterixConstantValue acv = (AsterixConstantValue) value;
- obj = acv.getObject();
- }
- ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
- DataOutput dos = abvs.getDataOutput();
- try {
- AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(obj.getType()).serialize(obj, dos);
- } catch (HyracksDataException e) {
- throw new AlgebricksException(e);
- }
- return new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(), abvs.getLength()));
- }
+ @SuppressWarnings("unchecked")
+ @Override
+ public ICopyEvaluatorFactory getConstantEvalFactory(
+ IAlgebricksConstantValue value) throws AlgebricksException {
+ IAObject obj = null;
+ if (value.isNull()) {
+ obj = ANull.NULL;
+ } else if (value.isTrue()) {
+ obj = ABoolean.TRUE;
+ } else if (value.isFalse()) {
+ obj = ABoolean.FALSE;
+ } else {
+ AsterixConstantValue acv = (AsterixConstantValue) value;
+ obj = acv.getObject();
+ }
+ ArrayBackedValueStorage abvs = new ArrayBackedValueStorage();
+ DataOutput dos = abvs.getDataOutput();
+ try {
+ AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(obj.getType()).serialize(obj,
+ dos);
+ } catch (HyracksDataException e) {
+ throw new AlgebricksException(e);
+ }
+ return new ConstantEvalFactory(Arrays.copyOf(abvs.getByteArray(),
+ abvs.getLength()));
+ }
- @Override
- public IBinaryIntegerInspectorFactory getBinaryIntegerInspectorFactory() {
- return AqlBinaryIntegerInspector.FACTORY;
- }
+ @Override
+ public IBinaryIntegerInspectorFactory getBinaryIntegerInspectorFactory() {
+ return AqlBinaryIntegerInspector.FACTORY;
+ }
- @Override
- public ITupleParserFactory createTupleParser(ARecordType recType, IParseFileSplitsDecl decl) {
- if (decl.isDelimitedFileFormat()) {
- int n = recType.getFieldTypes().length;
- IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
- for (int i = 0; i < n; i++) {
- ATypeTag tag = recType.getFieldTypes()[i].getTypeTag();
- IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
- if (vpf == null) {
- throw new NotImplementedException("No value parser factory for delimited fields of type " + tag);
- }
- fieldParserFactories[i] = vpf;
- }
- return new NtDelimitedDataTupleParserFactory(recType, fieldParserFactories, decl.getDelimChar());
- } else {
- return new AdmSchemafullRecordParserFactory(recType);
- }
- }
+ @Override
+ public ITupleParserFactory createTupleParser(ARecordType recType,
+ IParseFileSplitsDecl decl) {
+ if (decl.isDelimitedFileFormat()) {
+ int n = recType.getFieldTypes().length;
+ IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
+ for (int i = 0; i < n; i++) {
+ ATypeTag tag = recType.getFieldTypes()[i].getTypeTag();
+ IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
+ if (vpf == null) {
+ throw new NotImplementedException(
+ "No value parser factory for delimited fields of type "
+ + tag);
+ }
+ fieldParserFactories[i] = vpf;
+ }
+ return new NtDelimitedDataTupleParserFactory(recType,
+ fieldParserFactories, decl.getDelimChar());
+ } else {
+ return new AdmSchemafullRecordParserFactory(recType);
+ }
+ }
- @Override
- public ITupleParserFactory createTupleParser(ARecordType recType, boolean delimitedFormat, Character delimiter) {
- if (delimitedFormat) {
- int n = recType.getFieldTypes().length;
- IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
- for (int i = 0; i < n; i++) {
- ATypeTag tag = recType.getFieldTypes()[i].getTypeTag();
- IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
- if (vpf == null) {
- throw new NotImplementedException("No value parser factory for delimited fields of type " + tag);
- }
- fieldParserFactories[i] = vpf;
- }
- return new NtDelimitedDataTupleParserFactory(recType, fieldParserFactories, delimiter);
- } else {
- return new AdmSchemafullRecordParserFactory(recType);
- }
- }
+ @Override
+ public ITupleParserFactory createTupleParser(ARecordType recType,
+ boolean delimitedFormat, Character delimiter) {
+ if (delimitedFormat) {
+ int n = recType.getFieldTypes().length;
+ IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
+ for (int i = 0; i < n; i++) {
+ ATypeTag tag = recType.getFieldTypes()[i].getTypeTag();
+ IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
+ if (vpf == null) {
+ throw new NotImplementedException(
+ "No value parser factory for delimited fields of type "
+ + tag);
+ }
+ fieldParserFactories[i] = vpf;
+ }
+ return new NtDelimitedDataTupleParserFactory(recType,
+ fieldParserFactories, delimiter);
+ } else {
+ return new AdmSchemafullRecordParserFactory(recType);
+ }
+ }
- @Override
- public INullWriterFactory getNullWriterFactory() {
- return AqlNullWriterFactory.INSTANCE;
- }
+ @Override
+ public INullWriterFactory getNullWriterFactory() {
+ return AqlNullWriterFactory.INSTANCE;
+ }
- @Override
- public IExpressionEvalSizeComputer getExpressionEvalSizeComputer() {
- return new IExpressionEvalSizeComputer() {
- @Override
- public int getEvalSize(ILogicalExpression expr, IVariableEvalSizeEnvironment env)
- throws AlgebricksException {
- switch (expr.getExpressionTag()) {
- case CONSTANT: {
- ConstantExpression c = (ConstantExpression) expr;
- if (c == ConstantExpression.NULL) {
- return 1;
- } else if (c == ConstantExpression.FALSE || c == ConstantExpression.TRUE) {
- return 2;
- } else {
- AsterixConstantValue acv = (AsterixConstantValue) c.getValue();
- IAObject o = acv.getObject();
- switch (o.getType().getTypeTag()) {
- case DOUBLE: {
- return 9;
- }
- case BOOLEAN: {
- return 2;
- }
- case NULL: {
- return 1;
- }
- case INT32: {
- return 5;
- }
- case INT64: {
- return 9;
- }
- default: {
- // TODO
- return -1;
- }
- }
- }
- }
- case FUNCTION_CALL: {
- AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
- if (f.getFunctionIdentifier().equals(AsterixBuiltinFunctions.TID)) {
- return 5;
- } else {
- // TODO
- return -1;
- }
- }
- default: {
- // TODO
- return -1;
- }
- }
- }
- };
- }
+ @Override
+ public IExpressionEvalSizeComputer getExpressionEvalSizeComputer() {
+ return new IExpressionEvalSizeComputer() {
+ @Override
+ public int getEvalSize(ILogicalExpression expr,
+ IVariableEvalSizeEnvironment env)
+ throws AlgebricksException {
+ switch (expr.getExpressionTag()) {
+ case CONSTANT: {
+ ConstantExpression c = (ConstantExpression) expr;
+ if (c == ConstantExpression.NULL) {
+ return 1;
+ } else if (c == ConstantExpression.FALSE
+ || c == ConstantExpression.TRUE) {
+ return 2;
+ } else {
+ AsterixConstantValue acv = (AsterixConstantValue) c
+ .getValue();
+ IAObject o = acv.getObject();
+ switch (o.getType().getTypeTag()) {
+ case DOUBLE: {
+ return 9;
+ }
+ case BOOLEAN: {
+ return 2;
+ }
+ case NULL: {
+ return 1;
+ }
+ case INT32: {
+ return 5;
+ }
+ case INT64: {
+ return 9;
+ }
+ default: {
+ // TODO
+ return -1;
+ }
+ }
+ }
+ }
+ case FUNCTION_CALL: {
+ AbstractFunctionCallExpression f = (AbstractFunctionCallExpression) expr;
+ if (f.getFunctionIdentifier().equals(
+ AsterixBuiltinFunctions.TID)) {
+ return 5;
+ } else {
+ // TODO
+ return -1;
+ }
+ }
+ default: {
+ // TODO
+ return -1;
+ }
+ }
+ }
+ };
+ }
- @Override
- public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider() {
- return AqlNormalizedKeyComputerFactoryProvider.INSTANCE;
- }
+ @Override
+ public INormalizedKeyComputerFactoryProvider getNormalizedKeyComputerFactoryProvider() {
+ return AqlNormalizedKeyComputerFactoryProvider.INSTANCE;
+ }
+
+ @Override
+ public IBinaryHashFunctionFamilyProvider getBinaryHashFunctionFamilyProvider() {
+ return AqlBinaryHashFunctionFamilyProvider.INSTANCE;
+ }
}
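Editorial note on the eval-size constants returned above (1 for NULL, 2 for BOOLEAN, 5 for INT32, 9 for INT64 and DOUBLE): they are consistent with a one-byte type tag followed by the value's fixed-width payload. A minimal sketch of that arithmetic; the helper below is purely illustrative and is not part of this patch or of the codebase.

    // Assumed tagged layout: 1 type-tag byte + fixed-width payload.
    //   NULL    -> 1 + 0 = 1
    //   BOOLEAN -> 1 + 1 = 2
    //   INT32   -> 1 + 4 = 5
    //   INT64   -> 1 + 8 = 9
    //   DOUBLE  -> 1 + 8 = 9
    static int taggedFixedWidthSize(int payloadBytes) {
        return 1 + payloadBytes; // hypothetical helper, illustration only
    }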
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
new file mode 100644
index 0000000..8606088
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/ADMDataParser.java
@@ -0,0 +1,925 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayDeque;
+import java.util.BitSet;
+import java.util.List;
+import java.util.Queue;
+
+import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexer;
+import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexerConstants;
+import edu.uci.ics.asterix.adm.parser.nontagged.ParseException;
+import edu.uci.ics.asterix.adm.parser.nontagged.Token;
+import edu.uci.ics.asterix.builders.IARecordBuilder;
+import edu.uci.ics.asterix.builders.IAsterixListBuilder;
+import edu.uci.ics.asterix.builders.OrderedListBuilder;
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.builders.UnorderedListBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ACircleSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APoint3DSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARectangleSerializerDeserializer;
+import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.types.AOrderedListType;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+
+/**
+ * Parser for ADM-formatted data.
+ */
+public class ADMDataParser extends AbstractDataParser implements IDataParser {
+
+ protected AdmLexer admLexer;
+ protected ARecordType recordType;
+ protected boolean datasetRec;
+
+ private int nullableFieldId = 0;
+
+ private Queue<ArrayBackedValueStorage> baaosPool = new ArrayDeque<ArrayBackedValueStorage>();
+ private Queue<IARecordBuilder> recordBuilderPool = new ArrayDeque<IARecordBuilder>();
+ private Queue<IAsterixListBuilder> orderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
+ private Queue<IAsterixListBuilder> unorderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
+
+ private String mismatchErrorMessage = "Type mismatch: expected a value of type ";
+
+ @Override
+ public boolean parse(DataOutput out) throws HyracksDataException {
+ try {
+ return parseAdmInstance((IAType) recordType, datasetRec, out);
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void initialize(InputStream in, ARecordType recordType, boolean datasetRec) {
+ admLexer = new AdmLexer(in);
+ this.recordType = recordType;
+ this.datasetRec = datasetRec;
+ }
+
+ protected boolean parseAdmInstance(IAType objectType, boolean datasetRec, DataOutput out) throws AsterixException,
+ IOException {
+ Token token;
+ try {
+ token = admLexer.next();
+ } catch (ParseException pe) {
+ throw new AsterixException(pe);
+ }
+ if (token.kind == AdmLexerConstants.EOF) {
+ return false;
+ } else {
+ admFromLexerStream(token, objectType, out, datasetRec);
+ return true;
+ }
+ }
+
+ private void admFromLexerStream(Token token, IAType objectType, DataOutput out, Boolean datasetRec)
+ throws AsterixException, IOException {
+
+ switch (token.kind) {
+ case AdmLexerConstants.NULL_LITERAL: {
+ if (checkType(ATypeTag.NULL, objectType, out)) {
+ nullSerde.serialize(ANull.NULL, out);
+ } else
+ throw new AsterixException(" This field can not be null ");
+ break;
+ }
+ case AdmLexerConstants.TRUE_LITERAL: {
+ if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
+ booleanSerde.serialize(ABoolean.TRUE, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexerConstants.BOOLEAN_CONS: {
+ parseConstructor(ATypeTag.BOOLEAN, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.FALSE_LITERAL: {
+ if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
+ booleanSerde.serialize(ABoolean.FALSE, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexerConstants.DOUBLE_LITERAL: {
+ if (checkType(ATypeTag.DOUBLE, objectType, out)) {
+ aDouble.setValue(Double.parseDouble(token.image));
+ doubleSerde.serialize(aDouble, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexerConstants.DOUBLE_CONS: {
+ parseConstructor(ATypeTag.DOUBLE, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.FLOAT_LITERAL: {
+ if (checkType(ATypeTag.FLOAT, objectType, out)) {
+ aFloat.setValue(Float.parseFloat(token.image));
+ floatSerde.serialize(aFloat, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexerConstants.FLOAT_CONS: {
+ parseConstructor(ATypeTag.FLOAT, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.INT8_LITERAL: {
+ if (checkType(ATypeTag.INT8, objectType, out)) {
+ parseInt8(token.image, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexerConstants.INT8_CONS: {
+ parseConstructor(ATypeTag.INT8, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.INT16_LITERAL: {
+ if (checkType(ATypeTag.INT16, objectType, out)) {
+ parseInt16(token.image, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexerConstants.INT16_CONS: {
+ parseConstructor(ATypeTag.INT16, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.INT_LITERAL:
+ case AdmLexerConstants.INT32_LITERAL: {
+ if (checkType(ATypeTag.INT32, objectType, out)) {
+ parseInt32(token.image, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexerConstants.INT32_CONS: {
+ parseConstructor(ATypeTag.INT32, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.INT64_LITERAL: {
+ if (checkType(ATypeTag.INT64, objectType, out)) {
+ parseInt64(token.image, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexerConstants.INT64_CONS: {
+ parseConstructor(ATypeTag.INT64, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.STRING_LITERAL: {
+ if (checkType(ATypeTag.STRING, objectType, out)) {
+ aString.setValue(token.image.substring(1, token.image.length() - 1));
+ stringSerde.serialize(aString, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ break;
+ }
+ case AdmLexerConstants.STRING_CONS: {
+ parseConstructor(ATypeTag.STRING, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.DATE_CONS: {
+ parseConstructor(ATypeTag.DATE, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.TIME_CONS: {
+ parseConstructor(ATypeTag.TIME, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.DATETIME_CONS: {
+ parseConstructor(ATypeTag.DATETIME, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.DURATION_CONS: {
+ parseConstructor(ATypeTag.DURATION, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.POINT_CONS: {
+ parseConstructor(ATypeTag.POINT, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.POINT3D_CONS: {
+ parseConstructor(ATypeTag.POINT3D, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.CIRCLE_CONS: {
+ parseConstructor(ATypeTag.CIRCLE, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.RECTANGLE_CONS: {
+ parseConstructor(ATypeTag.RECTANGLE, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.LINE_CONS: {
+ parseConstructor(ATypeTag.LINE, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.POLYGON_CONS: {
+ parseConstructor(ATypeTag.POLYGON, objectType, out);
+ break;
+ }
+ case AdmLexerConstants.START_UNORDERED_LIST: {
+ if (checkType(ATypeTag.UNORDEREDLIST, objectType, out)) {
+ objectType = getComplexType(objectType, ATypeTag.UNORDEREDLIST);
+ parseUnorderedList((AUnorderedListType) objectType, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
+ break;
+ }
+
+ case AdmLexerConstants.START_ORDERED_LIST: {
+ if (checkType(ATypeTag.ORDEREDLIST, objectType, out)) {
+ objectType = getComplexType(objectType, ATypeTag.ORDEREDLIST);
+ parseOrderedList((AOrderedListType) objectType, out);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
+ break;
+ }
+ case AdmLexerConstants.START_RECORD: {
+ if (checkType(ATypeTag.RECORD, objectType, out)) {
+ objectType = getComplexType(objectType, ATypeTag.RECORD);
+ parseRecord((ARecordType) objectType, out, datasetRec);
+ } else
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
+ break;
+ }
+ case AdmLexerConstants.EOF: {
+ break;
+ }
+ default: {
+ throw new AsterixException("Unexpected ADM token kind: " + admLexer.tokenKindToString(token.kind) + ".");
+ }
+ }
+ }
+
+ private void parseDatetime(String datetime, DataOutput out) throws AsterixException, IOException {
+ try {
+ ADateTimeSerializerDeserializer.parse(datetime, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ private void parseDuration(String duration, DataOutput out) throws AsterixException {
+ try {
+ ADurationSerializerDeserializer.parse(duration, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+
+ }
+
+ private IAType getComplexType(IAType aObjectType, ATypeTag tag) {
+
+ if (aObjectType == null) {
+ return null;
+ }
+
+ if (aObjectType.getTypeTag() == tag)
+ return aObjectType;
+
+ if (aObjectType.getTypeTag() == ATypeTag.UNION) {
+ unionList = ((AUnionType) aObjectType).getUnionList();
+ for (int i = 0; i < unionList.size(); i++)
+ if (unionList.get(i).getTypeTag() == tag) {
+ return unionList.get(i);
+ }
+ }
+ return null; // unreachable in practice: checkType(...) has already matched the tag
+ }
+
+ List<IAType> unionList;
+
+ private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType, DataOutput out) throws IOException {
+
+ if (aObjectType == null)
+ return true;
+
+ if (aObjectType.getTypeTag() != ATypeTag.UNION) {
+ if (expectedTypeTag == aObjectType.getTypeTag())
+ return true;
+ } else { // union
+ unionList = ((AUnionType) aObjectType).getUnionList();
+ for (int i = 0; i < unionList.size(); i++)
+ if (unionList.get(i).getTypeTag() == expectedTypeTag)
+ return true;
+ }
+ return false;
+ }
+
+ private void parseRecord(ARecordType recType, DataOutput out, Boolean datasetRec) throws IOException,
+ AsterixException {
+
+ ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
+ ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
+ IARecordBuilder recBuilder = getRecordBuilder();
+
+ // Bit i is set once closed field i has been given a value; unset bits are
+ // checked against the type's nullability constraints after the record is parsed.
+ BitSet nulls = null;
+ if (datasetRec) {
+ if (recType != null) {
+ nulls = new BitSet(recType.getFieldNames().length);
+ recBuilder.reset(recType);
+ } else
+ recBuilder.reset(null);
+ } else if (recType != null) {
+ nulls = new BitSet(recType.getFieldNames().length);
+ recBuilder.reset(recType);
+ } else
+ recBuilder.reset(null);
+
+ recBuilder.init();
+ Token token = null;
+ boolean inRecord = true;
+ boolean expectingRecordField = false;
+ boolean first = true;
+
+ Boolean openRecordField = false;
+ int fieldId = 0;
+ IAType fieldType = null;
+ do {
+ token = nextToken();
+ switch (token.kind) {
+ case AdmLexerConstants.END_RECORD: {
+ if (expectingRecordField) {
+ throw new AsterixException("Found END_RECORD while expecting a record field.");
+ }
+ inRecord = false;
+ break;
+ }
+ case AdmLexerConstants.STRING_LITERAL: {
+ // we've read the name of the field
+ // now read the content
+ fieldNameBuffer.reset();
+ fieldValueBuffer.reset();
+ expectingRecordField = false;
+
+ if (recType != null) {
+ String fldName = token.image.substring(1, token.image.length() - 1);
+ fieldId = recBuilder.getFieldId(fldName);
+ if (fieldId < 0 && !recType.isOpen()) {
+ throw new AsterixException("This record is closed, you can not add extra fields !!");
+ } else if (fieldId < 0 && recType.isOpen()) {
+ aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
+ stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
+ openRecordField = true;
+ fieldType = null;
+ } else {
+ // a closed field
+ nulls.set(fieldId);
+ fieldType = recType.getFieldTypes()[fieldId];
+ openRecordField = false;
+ }
+ } else {
+ aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
+ stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
+ openRecordField = true;
+ fieldType = null;
+ }
+
+ token = nextToken();
+ if (token.kind != AdmLexerConstants.COLON) {
+ throw new AsterixException("Unexpected ADM token kind: "
+ + admLexer.tokenKindToString(token.kind) + " while expecting \":\".");
+ }
+
+ token = nextToken();
+ this.admFromLexerStream(token, fieldType, fieldValueBuffer.getDataOutput(), false);
+ if (openRecordField) {
+ if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize())
+ recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
+ } else if (recType.getFieldTypes()[fieldId].getTypeTag() == ATypeTag.UNION) {
+ if (NonTaggedFormatUtil.isOptionalField((AUnionType) recType.getFieldTypes()[fieldId])) {
+ if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
+ recBuilder.addField(fieldId, fieldValueBuffer);
+ }
+ }
+ } else {
+ recBuilder.addField(fieldId, fieldValueBuffer);
+ }
+
+ break;
+ }
+ case AdmLexerConstants.COMMA: {
+ if (first) {
+ throw new AsterixException("Found COMMA before any record field.");
+ }
+ if (expectingRecordField) {
+ throw new AsterixException("Found COMMA while expecting a record field.");
+ }
+ expectingRecordField = true;
+ break;
+ }
+ default: {
+ throw new AsterixException("Unexpected ADM token kind: " + admLexer.tokenKindToString(token.kind)
+ + " while parsing record fields.");
+ }
+ }
+ first = false;
+ } while (inRecord);
+
+ if (recType != null) {
+ nullableFieldId = checkNullConstraints(recType, nulls);
+ if (nullableFieldId != -1)
+ throw new AsterixException("Field " + nullableFieldId + " can not be null");
+ }
+ recBuilder.write(out, true);
+ returnRecordBuilder(recBuilder);
+ returnTempBuffer(fieldNameBuffer);
+ returnTempBuffer(fieldValueBuffer);
+ }
+
+ private int checkNullConstraints(ARecordType recType, BitSet nulls) {
+
+ for (int i = 0; i < recType.getFieldTypes().length; i++)
+ if (!nulls.get(i)) {
+ IAType type = recType.getFieldTypes()[i];
+ if (type.getTypeTag() != ATypeTag.NULL && type.getTypeTag() != ATypeTag.UNION)
+ return i;
+
+ if (type.getTypeTag() == ATypeTag.UNION) { // union
+ // reset per field: a stale 'true' left over from an earlier nullable field
+ // would otherwise mask a missing non-nullable union field
+ boolean isNull = false;
+ unionList = ((AUnionType) type).getUnionList();
+ for (int j = 0; j < unionList.size(); j++)
+ if (unionList.get(j).getTypeTag() == ATypeTag.NULL) {
+ isNull = true;
+ break;
+ }
+ if (!isNull)
+ return i;
+ }
+ }
+ return -1;
+ }
+
+ private void parseOrderedList(AOrderedListType oltype, DataOutput out) throws IOException, AsterixException {
+
+ ArrayBackedValueStorage itemBuffer = getTempBuffer();
+ OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
+
+ IAType itemType = null;
+ if (oltype != null)
+ itemType = oltype.getItemType();
+ orderedListBuilder.reset(oltype);
+
+ Token token = null;
+ boolean inList = true;
+ boolean expectingListItem = false;
+ boolean first = true;
+ do {
+ token = nextToken();
+ if (token.kind == AdmLexerConstants.END_ORDERED_LIST) {
+ if (expectingListItem) {
+ throw new AsterixException("Found END_COLLECTION while expecting a list item.");
+ }
+ inList = false;
+ } else if (token.kind == AdmLexerConstants.COMMA) {
+ if (first) {
+ throw new AsterixException("Found COMMA before any list item.");
+ }
+ if (expectingListItem) {
+ throw new AsterixException("Found COMMA while expecting a list item.");
+ }
+ expectingListItem = true;
+ } else {
+ expectingListItem = false;
+ itemBuffer.reset();
+
+ admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
+ orderedListBuilder.addItem(itemBuffer);
+ }
+ first = false;
+ } while (inList);
+ orderedListBuilder.write(out, true);
+ returnOrderedListBuilder(orderedListBuilder);
+ returnTempBuffer(itemBuffer);
+ }
+
+ private void parseUnorderedList(AUnorderedListType uoltype, DataOutput out) throws IOException, AsterixException {
+
+ ArrayBackedValueStorage itemBuffer = getTempBuffer();
+ UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
+
+ IAType itemType = null;
+
+ if (uoltype != null)
+ itemType = uoltype.getItemType();
+ unorderedListBuilder.reset(uoltype);
+
+ Token token = null;
+ boolean inList = true;
+ boolean expectingListItem = false;
+ boolean first = true;
+ do {
+ token = nextToken();
+ if (token.kind == AdmLexerConstants.END_UNORDERED_LIST) {
+ if (expectingListItem) {
+ throw new AsterixException("Found END_COLLECTION while expecting a list item.");
+ }
+ inList = false;
+ } else if (token.kind == AdmLexerConstants.COMMA) {
+ if (first) {
+ throw new AsterixException("Found COMMA before any list item.");
+ }
+ if (expectingListItem) {
+ throw new AsterixException("Found COMMA while expecting a list item.");
+ }
+ expectingListItem = true;
+ } else {
+ expectingListItem = false;
+ itemBuffer.reset();
+ admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
+ unorderedListBuilder.addItem(itemBuffer);
+ }
+ first = false;
+ } while (inList);
+ unorderedListBuilder.write(out, true);
+ returnUnorderedListBuilder(unorderedListBuilder);
+ returnTempBuffer(itemBuffer);
+ }
+
+ private Token nextToken() throws AsterixException {
+ try {
+ return admLexer.next();
+ } catch (ParseException pe) {
+ throw new AsterixException(pe);
+ }
+ }
+
+ private IARecordBuilder getRecordBuilder() {
+ RecordBuilder recBuilder = (RecordBuilder) recordBuilderPool.poll();
+ if (recBuilder != null)
+ return recBuilder;
+ else
+ return new RecordBuilder();
+ }
+
+ private void returnRecordBuilder(IARecordBuilder recBuilder) {
+ this.recordBuilderPool.add(recBuilder);
+ }
+
+ private IAsterixListBuilder getOrderedListBuilder() {
+ OrderedListBuilder orderedListBuilder = (OrderedListBuilder) orderedListBuilderPool.poll();
+ if (orderedListBuilder != null)
+ return orderedListBuilder;
+ else
+ return new OrderedListBuilder();
+ }
+
+ private void returnOrderedListBuilder(IAsterixListBuilder orderedListBuilder) {
+ this.orderedListBuilderPool.add(orderedListBuilder);
+ }
+
+ private IAsterixListBuilder getUnorderedListBuilder() {
+ UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) unorderedListBuilderPool.poll();
+ if (unorderedListBuilder != null)
+ return unorderedListBuilder;
+ else
+ return new UnorderedListBuilder();
+ }
+
+ private void returnUnorderedListBuilder(IAsterixListBuilder unorderedListBuilder) {
+ this.unorderedListBuilderPool.add(unorderedListBuilder);
+ }
+
+ private ArrayBackedValueStorage getTempBuffer() {
+ ArrayBackedValueStorage tmpBaaos = baaosPool.poll();
+ if (tmpBaaos != null) {
+ return tmpBaaos;
+ } else {
+ return new ArrayBackedValueStorage();
+ }
+ }
+
+ private void returnTempBuffer(ArrayBackedValueStorage tempBaaos) {
+ baaosPool.add(tempBaaos);
+ }
+
+ private void parseConstructor(ATypeTag typeTag, IAType objectType, DataOutput out) throws AsterixException {
+ try {
+ Token token = admLexer.next();
+ if (token.kind == AdmLexerConstants.CONSTRUCTOR_OPEN) {
+ if (checkType(typeTag, objectType, out)) {
+ token = admLexer.next();
+ if (token.kind == AdmLexerConstants.STRING_LITERAL) {
+ switch (typeTag) {
+ case BOOLEAN:
+ parseBoolean(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case INT8:
+ parseInt8(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case INT16:
+ parseInt16(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case INT32:
+ parseInt32(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case INT64:
+ parseInt64(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case FLOAT:
+ aFloat.setValue(Float.parseFloat(token.image.substring(1, token.image.length() - 1)));
+ floatSerde.serialize(aFloat, out);
+ break;
+ case DOUBLE:
+ aDouble.setValue(Double.parseDouble(token.image.substring(1, token.image.length() - 1)));
+ doubleSerde.serialize(aDouble, out);
+ break;
+ case STRING:
+ aString.setValue(token.image.substring(1, token.image.length() - 1));
+ stringSerde.serialize(aString, out);
+ break;
+ case TIME:
+ parseTime(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case DATE:
+ parseDate(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case DATETIME:
+ parseDatetime(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case DURATION:
+ parseDuration(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case POINT:
+ parsePoint(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case POINT3D:
+ parsePoint3d(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case CIRCLE:
+ parseCircle(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case RECTANGLE:
+ parseRectangle(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case LINE:
+ parseLine(token.image.substring(1, token.image.length() - 1), out);
+ break;
+ case POLYGON:
+ parsePolygon(token.image.substring(1, token.image.length() - 1), out);
+ break;
+
+ }
+ token = admLexer.next();
+ if (token.kind == AdmLexerConstants.CONSTRUCTOR_CLOSE)
+ return;
+ }
+ }
+ }
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+ throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
+ }
+
+ private void parseBoolean(String bool, DataOutput out) throws AsterixException {
+ String errorMessage = "This can not be an instance of boolean";
+ try {
+ if (bool.equals("true"))
+ booleanSerde.serialize(ABoolean.TRUE, out);
+ else if (bool.equals("false"))
+ booleanSerde.serialize(ABoolean.FALSE, out);
+ else
+ throw new AsterixException(errorMessage);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(errorMessage);
+ }
+ }
+
+ private void parseInt8(String int8, DataOutput out) throws AsterixException {
+ String errorMessage = "This can not be an instance of int8";
+ try {
+ boolean positive = true;
+ byte value = 0;
+ int offset = 0;
+
+ if (int8.charAt(offset) == '+')
+ offset++;
+ else if (int8.charAt(offset) == '-') {
+ offset++;
+ positive = false;
+ }
+ for (; offset < int8.length(); offset++) {
+ if (int8.charAt(offset) >= '0' && int8.charAt(offset) <= '9')
+ value = (byte) (value * 10 + int8.charAt(offset) - '0');
+ else if (int8.charAt(offset) == 'i' && int8.charAt(offset + 1) == '8' && offset + 2 == int8.length())
+ break;
+ else
+ throw new AsterixException(errorMessage);
+ }
+ if (value < 0)
+ throw new AsterixException(errorMessage);
+ if (value > 0 && !positive)
+ value *= -1;
+ aInt8.setValue(value);
+ int8Serde.serialize(aInt8, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(errorMessage);
+ }
+ }
+
+ private void parseInt16(String int16, DataOutput out) throws AsterixException {
+ String errorMessage = "This can not be an instance of int16";
+ try {
+ boolean positive = true;
+ short value = 0;
+ int offset = 0;
+
+ if (int16.charAt(offset) == '+')
+ offset++;
+ else if (int16.charAt(offset) == '-') {
+ offset++;
+ positive = false;
+ }
+ for (; offset < int16.length(); offset++) {
+ if (int16.charAt(offset) >= '0' && int16.charAt(offset) <= '9')
+ value = (short) (value * 10 + int16.charAt(offset) - '0');
+ else if (int16.charAt(offset) == 'i' && int16.charAt(offset + 1) == '1'
+ && int16.charAt(offset + 2) == '6' && offset + 3 == int16.length())
+ break;
+ else
+ throw new AsterixException(errorMessage);
+ }
+ if (value < 0)
+ throw new AsterixException(errorMessage);
+ if (value > 0 && !positive)
+ value *= -1;
+ aInt16.setValue(value);
+ int16Serde.serialize(aInt16, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(errorMessage);
+ }
+ }
+
+ private void parseInt32(String int32, DataOutput out) throws AsterixException {
+
+ String errorMessage = "This can not be an instance of int32";
+ try {
+ boolean positive = true;
+ int value = 0;
+ int offset = 0;
+
+ if (int32.charAt(offset) == '+')
+ offset++;
+ else if (int32.charAt(offset) == '-') {
+ offset++;
+ positive = false;
+ }
+ for (; offset < int32.length(); offset++) {
+ if (int32.charAt(offset) >= '0' && int32.charAt(offset) <= '9')
+ value = (value * 10 + int32.charAt(offset) - '0');
+ else if (int32.charAt(offset) == 'i' && int32.charAt(offset + 1) == '3'
+ && int32.charAt(offset + 2) == '2' && offset + 3 == int32.length())
+ break;
+ else
+ throw new AsterixException(errorMessage);
+ }
+ if (value < 0)
+ throw new AsterixException(errorMessage);
+ if (value > 0 && !positive)
+ value *= -1;
+
+ aInt32.setValue(value);
+ int32Serde.serialize(aInt32, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(errorMessage);
+ }
+ }
+
+ private void parseInt64(String int64, DataOutput out) throws AsterixException {
+ String errorMessage = "This can not be an instance of int64";
+ try {
+ boolean positive = true;
+ long value = 0;
+ int offset = 0;
+
+ if (int64.charAt(offset) == '+')
+ offset++;
+ else if (int64.charAt(offset) == '-') {
+ offset++;
+ positive = false;
+ }
+ for (; offset < int64.length(); offset++) {
+ if (int64.charAt(offset) >= '0' && int64.charAt(offset) <= '9')
+ value = (value * 10 + int64.charAt(offset) - '0');
+ else if (int64.charAt(offset) == 'i' && int64.charAt(offset + 1) == '6'
+ && int64.charAt(offset + 2) == '4' && offset + 3 == int64.length())
+ break;
+ else
+ throw new AsterixException(errorMessage);
+ }
+ if (value < 0)
+ throw new AsterixException(errorMessage);
+ if (value > 0 && !positive)
+ value *= -1;
+
+ aInt64.setValue(value);
+ int64Serde.serialize(aInt64, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(errorMessage);
+ }
+ }
+
+ private void parsePoint(String point, DataOutput out) throws AsterixException {
+ try {
+ APointSerializerDeserializer.parse(point, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ private void parsePoint3d(String point3d, DataOutput out) throws AsterixException {
+ try {
+ APoint3DSerializerDeserializer.parse(point3d, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ private void parseCircle(String circle, DataOutput out) throws AsterixException {
+ try {
+ ACircleSerializerDeserializer.parse(circle, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ private void parseRectangle(String rectangle, DataOutput out) throws AsterixException {
+ try {
+ ARectangleSerializerDeserializer.parse(rectangle, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ private void parseLine(String line, DataOutput out) throws AsterixException {
+ try {
+ ALineSerializerDeserializer.parse(line, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ private void parsePolygon(String polygon, DataOutput out) throws AsterixException, IOException {
+ try {
+ APolygonSerializerDeserializer.parse(polygon, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ private void parseTime(String time, DataOutput out) throws AsterixException {
+ try {
+ ATimeSerializerDeserializer.parse(time, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+ private void parseDate(String date, DataOutput out) throws AsterixException, IOException {
+ try {
+ ADateSerializerDeserializer.parse(date, out);
+ } catch (HyracksDataException e) {
+ throw new AsterixException(e);
+ }
+ }
+
+}
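To make the new parser's contract concrete, here is a minimal usage sketch that drives ADMDataParser through the IDataParser methods introduced above (initialize and parse). Everything else in the sketch is assumed for illustration only: the class and method names, how the ARecordType and the input stream are obtained, and the throwaway DataOutputStream sink.

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.InputStream;

    import edu.uci.ics.asterix.om.types.ARecordType;
    import edu.uci.ics.asterix.runtime.operators.file.ADMDataParser;

    public class AdmParseSketch {
        // Parses every top-level ADM instance from 'in' against 'recType' and
        // returns how many instances were read; parse(...) returns false at EOF.
        static int parseAll(InputStream in, ARecordType recType) throws Exception {
            ADMDataParser parser = new ADMDataParser();
            parser.initialize(in, recType, true); // true: these are dataset records
            DataOutputStream out = new DataOutputStream(new ByteArrayOutputStream());
            int count = 0;
            while (parser.parse(out)) {
                count++;
            }
            return count;
        }
    }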
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java
new file mode 100644
index 0000000..fc2d7ca
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractDataParser.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import edu.uci.ics.asterix.om.base.ABoolean;
+import edu.uci.ics.asterix.om.base.ADouble;
+import edu.uci.ics.asterix.om.base.AFloat;
+import edu.uci.ics.asterix.om.base.AInt16;
+import edu.uci.ics.asterix.om.base.AInt32;
+import edu.uci.ics.asterix.om.base.AInt64;
+import edu.uci.ics.asterix.om.base.AInt8;
+import edu.uci.ics.asterix.om.base.AMutableDouble;
+import edu.uci.ics.asterix.om.base.AMutableFloat;
+import edu.uci.ics.asterix.om.base.AMutableInt16;
+import edu.uci.ics.asterix.om.base.AMutableInt32;
+import edu.uci.ics.asterix.om.base.AMutableInt64;
+import edu.uci.ics.asterix.om.base.AMutableInt8;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.base.ANull;
+import edu.uci.ics.asterix.om.base.AString;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+
+/**
+ * Base class for data parsers. Includes the common set of definitions for
+ * serializers/deserializers for built-in ADM types.
+ */
+public abstract class AbstractDataParser implements IDataParser {
+
+ protected AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
+ protected AMutableInt16 aInt16 = new AMutableInt16((short) 0);
+ protected AMutableInt32 aInt32 = new AMutableInt32(0);
+ protected AMutableInt64 aInt64 = new AMutableInt64(0);
+ protected AMutableDouble aDouble = new AMutableDouble(0);
+ protected AMutableFloat aFloat = new AMutableFloat(0);
+ protected AMutableString aString = new AMutableString("");
+ protected AMutableString aStringFieldName = new AMutableString("");
+
+ // Serializers
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ASTRING);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AFloat> floatSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AFLOAT);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt8> int8Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT8);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt16> int16Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT16);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT32);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AINT64);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ @SuppressWarnings("unchecked")
+ protected ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ANULL);
+
+}
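The point of pulling the mutable value holders and serializers into this base class is that concrete parsers can write tagged values without per-value allocation. A minimal sketch of that pattern in a hypothetical subclass method; only the inherited aInt32 and int32Serde fields come from the code above.

    // Fragment of a hypothetical AbstractDataParser subclass.
    protected void writeInt32(int v, java.io.DataOutput out)
            throws edu.uci.ics.hyracks.api.exceptions.HyracksDataException {
        aInt32.setValue(v);                // reuse the mutable holder instead of allocating an AInt32
        int32Serde.serialize(aInt32, out); // write the tagged AINT32 value
    }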
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
index 6e83689..cb05529 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AbstractTupleParser.java
@@ -1,72 +1,91 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.runtime.operators.file;
+import java.io.DataOutput;
+import java.io.IOException;
import java.io.InputStream;
+import java.nio.ByteBuffer;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt16;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.base.AInt8;
-import edu.uci.ics.asterix.om.base.AMutableDouble;
-import edu.uci.ics.asterix.om.base.AMutableFloat;
-import edu.uci.ics.asterix.om.base.AMutableInt16;
-import edu.uci.ics.asterix.om.base.AMutableInt32;
-import edu.uci.ics.asterix.om.base.AMutableInt64;
-import edu.uci.ics.asterix.om.base.AMutableInt8;
-import edu.uci.ics.asterix.om.base.AMutableString;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
+/**
+ * An abstract base implementation of ITupleParser. It provides the common
+ * functionality for parsing data in an external format and packing the
+ * resulting tuples into frames.
+ */
public abstract class AbstractTupleParser implements ITupleParser {
- // Mutable Types..
- protected AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
- protected AMutableInt16 aInt16 = new AMutableInt16((short) 0);
- protected AMutableInt32 aInt32 = new AMutableInt32(0);
- protected AMutableInt64 aInt64 = new AMutableInt64(0);
- protected AMutableDouble aDouble = new AMutableDouble(0);
- protected AMutableFloat aFloat = new AMutableFloat(0);
- protected AMutableString aString = new AMutableString("");
- protected AMutableString aStringFieldName = new AMutableString("");
+ protected ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
+ protected DataOutput dos = tb.getDataOutput();
+ protected final FrameTupleAppender appender;
+ protected final ByteBuffer frame;
+ protected final ARecordType recType;
+ protected final IHyracksTaskContext ctx;
- // Serializers
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AFloat> floatSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AFLOAT);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt8> int8Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT8);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt16> int16Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT16);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
- @SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
+ public AbstractTupleParser(IHyracksTaskContext ctx, ARecordType recType) {
+ appender = new FrameTupleAppender(ctx.getFrameSize());
+ frame = ctx.allocateFrame();
+ this.recType = recType;
+ this.ctx = ctx;
+ }
-
- @Override
- public abstract void parse(InputStream in, IFrameWriter writer) throws HyracksDataException;
+ public abstract IDataParser getDataParser();
+
+ @Override
+ public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
+
+ appender.reset(frame, true);
+ IDataParser parser = getDataParser();
+ try {
+ parser.initialize(in, recType, true);
+ while (true) {
+ tb.reset();
+ if (!parser.parse(tb.getDataOutput())) {
+ break;
+ }
+ tb.addFieldEndOffset();
+ addTupleToFrame(writer);
+ }
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(frame, writer);
+ }
+ } catch (AsterixException ae) {
+ throw new HyracksDataException(ae);
+ } catch (IOException ioe) {
+ throw new HyracksDataException(ioe);
+ }
+ }
+
+ protected void addTupleToFrame(IFrameWriter writer) throws HyracksDataException {
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ FrameUtils.flushFrame(frame, writer);
+ appender.reset(frame, true);
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ throw new IllegalStateException();
+ }
+ }
+
+ }
+
}
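The refactoring above turns the old monolithic anonymous parser into a template method: AbstractTupleParser owns the frame-packing loop and subclasses only supply getDataParser(). A minimal sketch of such a subclass follows; the class name is hypothetical, while the constructor and getDataParser() signatures are the ones defined above.

    import edu.uci.ics.asterix.om.types.ARecordType;
    import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;

    public class AdmTupleParserSketch extends AbstractTupleParser {

        public AdmTupleParserSketch(IHyracksTaskContext ctx, ARecordType recType) {
            super(ctx, recType);
        }

        @Override
        public IDataParser getDataParser() {
            // Delegate ADM parsing to the new ADMDataParser; frame packing is inherited.
            return new ADMDataParser();
        }
    }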
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java
index 3b0d1ab..a9287c8 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmSchemafullRecordParserFactory.java
@@ -1,70 +1,28 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.runtime.operators.file;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.util.ArrayDeque;
-import java.util.BitSet;
-import java.util.List;
-import java.util.Queue;
-
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexer;
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexerConstants;
-import edu.uci.ics.asterix.adm.parser.nontagged.ParseException;
-import edu.uci.ics.asterix.adm.parser.nontagged.Token;
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.IAsterixListBuilder;
-import edu.uci.ics.asterix.builders.OrderedListBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.builders.UnorderedListBuilder;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ACircleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APoint3DSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARectangleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ADouble;
-import edu.uci.ics.asterix.om.base.AFloat;
-import edu.uci.ics.asterix.om.base.AInt16;
-import edu.uci.ics.asterix.om.base.AInt32;
-import edu.uci.ics.asterix.om.base.AInt64;
-import edu.uci.ics.asterix.om.base.AInt8;
-import edu.uci.ics.asterix.om.base.AMutableDouble;
-import edu.uci.ics.asterix.om.base.AMutableFloat;
-import edu.uci.ics.asterix.om.base.AMutableInt16;
-import edu.uci.ics.asterix.om.base.AMutableInt32;
-import edu.uci.ics.asterix.om.base.AMutableInt64;
-import edu.uci.ics.asterix.om.base.AMutableInt8;
-import edu.uci.ics.asterix.om.base.AMutableString;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.base.AString;
-import edu.uci.ics.asterix.om.types.AOrderedListType;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.AUnorderedListType;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+/**
+ * A tuple parser factory that creates tuple parsers capable of parsing
+ * ADM data.
+ */
public class AdmSchemafullRecordParserFactory implements ITupleParserFactory {
private static final long serialVersionUID = 1L;
@@ -77,930 +35,7 @@
@Override
public ITupleParser createTupleParser(final IHyracksTaskContext ctx) {
- return new ITupleParser() {
- private AdmLexer admLexer;
- private ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
- private DataOutput dos = tb.getDataOutput();
- private FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- private ByteBuffer frame = ctx.allocateFrame();
-
- private int nullableFieldId = 0;
-
- private Queue<ArrayBackedValueStorage> baaosPool = new ArrayDeque<ArrayBackedValueStorage>();
- private Queue<IARecordBuilder> recordBuilderPool = new ArrayDeque<IARecordBuilder>();
- private Queue<IAsterixListBuilder> orderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
- private Queue<IAsterixListBuilder> unorderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
-
- private String mismatchErrorMessage = "Mismatch Type, expecting a value of type ";
-
- // Mutable Types..
- private AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
- private AMutableInt16 aInt16 = new AMutableInt16((short) 0);
- private AMutableInt32 aInt32 = new AMutableInt32(0);
- private AMutableInt64 aInt64 = new AMutableInt64(0);
- private AMutableDouble aDouble = new AMutableDouble(0);
- private AMutableFloat aFloat = new AMutableFloat(0);
- private AMutableString aString = new AMutableString("");
- private AMutableString aStringFieldName = new AMutableString("");
-
- // Serializers
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADouble> doubleSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AFloat> floatSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AFLOAT);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt8> int8Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT8);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt16> int16Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT16);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt32> int32Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ABoolean> booleanSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<ANull> nullSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
-
- @Override
- public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
- admLexer = new AdmLexer(in);
- appender.reset(frame, true);
- int tupleNum = 0;
- try {
- while (true) {
- tb.reset();
- if (!parseAdmInstance(recType, true, dos)) {
- break;
- }
- tb.addFieldEndOffset();
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(frame, writer);
- appender.reset(frame, true);
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- tupleNum++;
- }
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(frame, writer);
- }
- } catch (AsterixException ae) {
- throw new HyracksDataException(ae);
- } catch (IOException ioe) {
- throw new HyracksDataException(ioe);
- }
- }
-
- private boolean parseAdmInstance(IAType objectType, Boolean datasetRec, DataOutput out)
- throws AsterixException, IOException {
- Token token;
- try {
- token = admLexer.next();
- } catch (ParseException pe) {
- throw new AsterixException(pe);
- }
- if (token.kind == AdmLexerConstants.EOF) {
- return false;
- } else {
- admFromLexerStream(token, objectType, out, datasetRec);
- return true;
- }
- }
-
- private void admFromLexerStream(Token token, IAType objectType, DataOutput out, Boolean datasetRec)
- throws AsterixException, IOException {
-
- switch (token.kind) {
- case AdmLexerConstants.NULL_LITERAL: {
- if (checkType(ATypeTag.NULL, objectType, out)) {
- nullSerde.serialize(ANull.NULL, out);
- } else
- throw new AsterixException(" This field can not be null ");
- break;
- }
- case AdmLexerConstants.TRUE_LITERAL: {
- if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
- booleanSerde.serialize(ABoolean.TRUE, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.BOOLEAN_CONS: {
- parseConstructor(ATypeTag.BOOLEAN, objectType, out);
- break;
- }
- case AdmLexerConstants.FALSE_LITERAL: {
- if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
- booleanSerde.serialize(ABoolean.FALSE, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.DOUBLE_LITERAL: {
- if (checkType(ATypeTag.DOUBLE, objectType, out)) {
- aDouble.setValue(Double.parseDouble(token.image));
- doubleSerde.serialize(aDouble, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.DOUBLE_CONS: {
- parseConstructor(ATypeTag.DOUBLE, objectType, out);
- break;
- }
- case AdmLexerConstants.FLOAT_LITERAL: {
- if (checkType(ATypeTag.FLOAT, objectType, out)) {
- aFloat.setValue(Float.parseFloat(token.image));
- floatSerde.serialize(aFloat, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.FLOAT_CONS: {
- parseConstructor(ATypeTag.FLOAT, objectType, out);
- break;
- }
- case AdmLexerConstants.INT8_LITERAL: {
- if (checkType(ATypeTag.INT8, objectType, out)) {
- parseInt8(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT8_CONS: {
- parseConstructor(ATypeTag.INT8, objectType, out);
- break;
- }
- case AdmLexerConstants.INT16_LITERAL: {
- if (checkType(ATypeTag.INT16, objectType, out)) {
- parseInt16(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT16_CONS: {
- parseConstructor(ATypeTag.INT16, objectType, out);
- break;
- }
- case AdmLexerConstants.INT_LITERAL:
- case AdmLexerConstants.INT32_LITERAL: {
- if (checkType(ATypeTag.INT32, objectType, out)) {
- parseInt32(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT32_CONS: {
- parseConstructor(ATypeTag.INT32, objectType, out);
- break;
- }
- case AdmLexerConstants.INT64_LITERAL: {
- if (checkType(ATypeTag.INT64, objectType, out)) {
- parseInt64(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT64_CONS: {
- parseConstructor(ATypeTag.INT64, objectType, out);
- break;
- }
- case AdmLexerConstants.STRING_LITERAL: {
- if (checkType(ATypeTag.STRING, objectType, out)) {
- aString.setValue(token.image.substring(1, token.image.length() - 1));
- stringSerde.serialize(aString, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.STRING_CONS: {
- parseConstructor(ATypeTag.STRING, objectType, out);
- break;
- }
- case AdmLexerConstants.DATE_CONS: {
- parseConstructor(ATypeTag.DATE, objectType, out);
- break;
- }
- case AdmLexerConstants.TIME_CONS: {
- parseConstructor(ATypeTag.TIME, objectType, out);
- break;
- }
- case AdmLexerConstants.DATETIME_CONS: {
- parseConstructor(ATypeTag.DATETIME, objectType, out);
- break;
- }
- case AdmLexerConstants.DURATION_CONS: {
- parseConstructor(ATypeTag.DURATION, objectType, out);
- break;
- }
- case AdmLexerConstants.POINT_CONS: {
- parseConstructor(ATypeTag.POINT, objectType, out);
- break;
- }
- case AdmLexerConstants.POINT3D_CONS: {
- parseConstructor(ATypeTag.POINT3D, objectType, out);
- break;
- }
- case AdmLexerConstants.CIRCLE_CONS: {
- parseConstructor(ATypeTag.CIRCLE, objectType, out);
- break;
- }
- case AdmLexerConstants.RECTANGLE_CONS: {
- parseConstructor(ATypeTag.RECTANGLE, objectType, out);
- break;
- }
- case AdmLexerConstants.LINE_CONS: {
- parseConstructor(ATypeTag.LINE, objectType, out);
- break;
- }
- case AdmLexerConstants.POLYGON_CONS: {
- parseConstructor(ATypeTag.POLYGON, objectType, out);
- break;
- }
- case AdmLexerConstants.START_UNORDERED_LIST: {
- if (checkType(ATypeTag.UNORDEREDLIST, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.UNORDEREDLIST);
- parseUnorderedList((AUnorderedListType) objectType, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
- break;
- }
-
- case AdmLexerConstants.START_ORDERED_LIST: {
- if (checkType(ATypeTag.ORDEREDLIST, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.ORDEREDLIST);
- parseOrderedList((AOrderedListType) objectType, out);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
- break;
- }
- case AdmLexerConstants.START_RECORD: {
- if (checkType(ATypeTag.RECORD, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.RECORD);
- parseRecord((ARecordType) objectType, out, datasetRec);
- } else
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeTag());
- break;
- }
- case AdmLexerConstants.EOF: {
- break;
- }
- default: {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind) + ".");
- }
- }
- }
-
- private void parseConstructor(ATypeTag typeTag, IAType objectType, DataOutput out) throws AsterixException {
- try {
- Token token = admLexer.next();
- if (token.kind == AdmLexerConstants.CONSTRUCTOR_OPEN) {
- if (checkType(typeTag, objectType, out)) {
- token = admLexer.next();
- if (token.kind == AdmLexerConstants.STRING_LITERAL) {
- switch (typeTag) {
- case BOOLEAN:
- parseBoolean(token.image.substring(1, token.image.length() - 1), out);
- break;
- case INT8:
- parseInt8(token.image.substring(1, token.image.length() - 1), out);
- break;
- case INT16:
- parseInt16(token.image.substring(1, token.image.length() - 1), out);
- break;
- case INT32:
- parseInt32(token.image.substring(1, token.image.length() - 1), out);
- break;
- case INT64:
- parseInt64(token.image.substring(1, token.image.length() - 1), out);
- break;
- case FLOAT:
- aFloat.setValue(Float.parseFloat(token.image.substring(1,
- token.image.length() - 1)));
- floatSerde.serialize(aFloat, out);
- break;
- case DOUBLE:
- aDouble.setValue(Double.parseDouble(token.image.substring(1,
- token.image.length() - 1)));
- doubleSerde.serialize(aDouble, out);
- break;
- case STRING:
- aString.setValue(token.image.substring(1, token.image.length() - 1));
- stringSerde.serialize(aString, out);
- break;
- case TIME:
- parseTime(token.image.substring(1, token.image.length() - 1), out);
- break;
- case DATE:
- parseDate(token.image.substring(1, token.image.length() - 1), out);
- break;
- case DATETIME:
- parseDatetime(token.image.substring(1, token.image.length() - 1), out);
- break;
- case DURATION:
- parseDuration(token.image.substring(1, token.image.length() - 1), out);
- break;
- case POINT:
- parsePoint(token.image.substring(1, token.image.length() - 1), out);
- break;
- case POINT3D:
- parsePoint3d(token.image.substring(1, token.image.length() - 1), out);
- break;
- case CIRCLE:
- parseCircle(token.image.substring(1, token.image.length() - 1), out);
- break;
- case RECTANGLE:
- parseRectangle(token.image.substring(1, token.image.length() - 1), out);
- break;
- case LINE:
- parseLine(token.image.substring(1, token.image.length() - 1), out);
- break;
- case POLYGON:
- parsePolygon(token.image.substring(1, token.image.length() - 1), out);
- break;
-
- }
- token = admLexer.next();
- if (token.kind == AdmLexerConstants.CONSTRUCTOR_CLOSE)
- return;
- }
- }
- }
- } catch (Exception e) {
- throw new AsterixException(e);
- }
- throw new AsterixException(mismatchErrorMessage + objectType.getTypeName());
- }
-
- private void parseBoolean(String bool, DataOutput out) throws AsterixException {
- String errorMessage = "This can not be an instance of boolean";
- try {
- if (bool.equals("true"))
- booleanSerde.serialize(ABoolean.TRUE, out);
- else if (bool.equals("false"))
- booleanSerde.serialize(ABoolean.FALSE, out);
- else
- throw new AsterixException(errorMessage);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt8(String int8, DataOutput out) throws AsterixException {
- String errorMessage = "This can not be an instance of int8";
- try {
- boolean positive = true;
- byte value = 0;
- int offset = 0;
-
- if (int8.charAt(offset) == '+')
- offset++;
- else if (int8.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int8.length(); offset++) {
- if (int8.charAt(offset) >= '0' && int8.charAt(offset) <= '9')
- value = (byte) (value * 10 + int8.charAt(offset) - '0');
- else if (int8.charAt(offset) == 'i' && int8.charAt(offset + 1) == '8'
- && offset + 2 == int8.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
- aInt8.setValue(value);
- int8Serde.serialize(aInt8, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt16(String int16, DataOutput out) throws AsterixException {
- String errorMessage = "This can not be an instance of int16";
- try {
- boolean positive = true;
- short value = 0;
- int offset = 0;
-
- if (int16.charAt(offset) == '+')
- offset++;
- else if (int16.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int16.length(); offset++) {
- if (int16.charAt(offset) >= '0' && int16.charAt(offset) <= '9')
- value = (short) (value * 10 + int16.charAt(offset) - '0');
- else if (int16.charAt(offset) == 'i' && int16.charAt(offset + 1) == '1'
- && int16.charAt(offset + 2) == '6' && offset + 3 == int16.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
- aInt16.setValue(value);
- int16Serde.serialize(aInt16, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt32(String int32, DataOutput out) throws AsterixException {
-
- String errorMessage = "This can not be an instance of int32";
- try {
- boolean positive = true;
- int value = 0;
- int offset = 0;
-
- if (int32.charAt(offset) == '+')
- offset++;
- else if (int32.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int32.length(); offset++) {
- if (int32.charAt(offset) >= '0' && int32.charAt(offset) <= '9')
- value = (value * 10 + int32.charAt(offset) - '0');
- else if (int32.charAt(offset) == 'i' && int32.charAt(offset + 1) == '3'
- && int32.charAt(offset + 2) == '2' && offset + 3 == int32.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
-
- aInt32.setValue(value);
- int32Serde.serialize(aInt32, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt64(String int64, DataOutput out) throws AsterixException {
- String errorMessage = "This can not be an instance of int64";
- try {
- boolean positive = true;
- long value = 0;
- int offset = 0;
-
- if (int64.charAt(offset) == '+')
- offset++;
- else if (int64.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int64.length(); offset++) {
- if (int64.charAt(offset) >= '0' && int64.charAt(offset) <= '9')
- value = (value * 10 + int64.charAt(offset) - '0');
- else if (int64.charAt(offset) == 'i' && int64.charAt(offset + 1) == '6'
- && int64.charAt(offset + 2) == '4' && offset + 3 == int64.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
-
- aInt64.setValue(value);
- int64Serde.serialize(aInt64, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parsePoint(String point, DataOutput out) throws AsterixException {
- try {
- APointSerializerDeserializer.parse(point, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parsePoint3d(String point3d, DataOutput out) throws AsterixException {
- try {
- APoint3DSerializerDeserializer.parse(point3d, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseCircle(String circle, DataOutput out) throws AsterixException {
- try {
- ACircleSerializerDeserializer.parse(circle, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseRectangle(String rectangle, DataOutput out) throws AsterixException {
- try {
- ARectangleSerializerDeserializer.parse(rectangle, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseLine(String line, DataOutput out) throws AsterixException {
- try {
- ALineSerializerDeserializer.parse(line, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parsePolygon(String polygon, DataOutput out) throws AsterixException, IOException {
- try {
- APolygonSerializerDeserializer.parse(polygon, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseTime(String time, DataOutput out) throws AsterixException {
- try {
- ATimeSerializerDeserializer.parse(time, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseDate(String date, DataOutput out) throws AsterixException, IOException {
- try {
- ADateSerializerDeserializer.parse(date, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseDatetime(String datetime, DataOutput out) throws AsterixException, IOException {
- try {
- ADateTimeSerializerDeserializer.parse(datetime, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseDuration(String duration, DataOutput out) throws AsterixException {
- try {
- ADurationSerializerDeserializer.parse(duration, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
-
- }
-
- private IAType getComplexType(IAType aObjectType, ATypeTag tag) {
-
- if (aObjectType == null) {
- return null;
- }
-
- if (aObjectType.getTypeTag() == tag)
- return aObjectType;
-
- if (aObjectType.getTypeTag() == ATypeTag.UNION) {
- unionList = ((AUnionType) aObjectType).getUnionList();
- for (int i = 0; i < unionList.size(); i++)
- if (unionList.get(i).getTypeTag() == tag) {
- return unionList.get(i);
- }
- }
- return null; // wont get here
- }
-
- List<IAType> unionList;
-
- private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType, DataOutput out) throws IOException {
-
- if (aObjectType == null)
- return true;
-
- if (aObjectType.getTypeTag() != ATypeTag.UNION) {
- if (expectedTypeTag == aObjectType.getTypeTag())
- return true;
- } else { // union
- unionList = ((AUnionType) aObjectType).getUnionList();
- for (int i = 0; i < unionList.size(); i++)
- if (unionList.get(i).getTypeTag() == expectedTypeTag)
- return true;
- }
- return false;
- }
-
- private void parseRecord(ARecordType recType, DataOutput out, Boolean datasetRec) throws IOException,
- AsterixException {
-
- ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
- ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
- IARecordBuilder recBuilder = getRecordBuilder();
-
- // Boolean[] nulls = null;
- BitSet nulls = null;
- if (datasetRec) {
- if (recType != null) {
- nulls = new BitSet(recType.getFieldNames().length);
- recBuilder.reset(recType);
- } else
- recBuilder.reset(null);
- } else if (recType != null) {
- nulls = new BitSet(recType.getFieldNames().length);
- recBuilder.reset(recType);
- } else
- recBuilder.reset(null);
-
- recBuilder.init();
- Token token = null;
- boolean inRecord = true;
- boolean expectingRecordField = false;
- boolean first = true;
-
- Boolean openRecordField = false;
- int fieldId = 0;
- IAType fieldType = null;
- do {
- token = nextToken();
- switch (token.kind) {
- case AdmLexerConstants.END_RECORD: {
- if (expectingRecordField) {
- throw new AsterixException("Found END_RECORD while expecting a record field.");
- }
- inRecord = false;
- break;
- }
- case AdmLexerConstants.STRING_LITERAL: {
- // we've read the name of the field
- // now read the content
- fieldNameBuffer.reset();
- fieldValueBuffer.reset();
- expectingRecordField = false;
-
- if (recType != null) {
- String fldName = token.image.substring(1, token.image.length() - 1);
- fieldId = recBuilder.getFieldId(fldName);
- if (fieldId < 0 && !recType.isOpen()) {
- throw new AsterixException("This record is closed, you can not add extra fields !!");
- } else if (fieldId < 0 && recType.isOpen()) {
- aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
- stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
- openRecordField = true;
- fieldType = null;
- } else {
- // a closed field
- nulls.set(fieldId);
- fieldType = recType.getFieldTypes()[fieldId];
- openRecordField = false;
- }
- } else {
- aStringFieldName.setValue(token.image.substring(1, token.image.length() - 1));
- stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
- openRecordField = true;
- fieldType = null;
- }
-
- token = nextToken();
- if (token.kind != AdmLexerConstants.COLON) {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind) + " while expecting \":\".");
- }
-
- token = nextToken();
- this.admFromLexerStream(token, fieldType, fieldValueBuffer.getDataOutput(), false);
- if (openRecordField) {
- if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize())
- recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
- } else if (recType.getFieldTypes()[fieldId].getTypeTag() == ATypeTag.UNION) {
- if (NonTaggedFormatUtil.isOptionalField((AUnionType) recType.getFieldTypes()[fieldId])) {
- if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
- recBuilder.addField(fieldId, fieldValueBuffer);
- }
- }
- } else {
- recBuilder.addField(fieldId, fieldValueBuffer);
- }
-
- break;
- }
- case AdmLexerConstants.COMMA: {
- if (first) {
- throw new AsterixException("Found COMMA before any record field.");
- }
- if (expectingRecordField) {
- throw new AsterixException("Found COMMA while expecting a record field.");
- }
- expectingRecordField = true;
- break;
- }
- default: {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind) + " while parsing record fields.");
- }
- }
- first = false;
- } while (inRecord);
-
- if (recType != null) {
- nullableFieldId = checkNullConstraints(recType, nulls);
- if (nullableFieldId != -1)
- throw new AsterixException("Field " + nullableFieldId + " can not be null");
- }
- recBuilder.write(out, true);
- returnRecordBuilder(recBuilder);
- returnTempBuffer(fieldNameBuffer);
- returnTempBuffer(fieldValueBuffer);
- }
-
- private int checkNullConstraints(ARecordType recType, BitSet nulls) {
-
- boolean isNull = false;
- for (int i = 0; i < recType.getFieldTypes().length; i++)
- if (nulls.get(i) == false) {
- IAType type = recType.getFieldTypes()[i];
- if (type.getTypeTag() != ATypeTag.NULL && type.getTypeTag() != ATypeTag.UNION)
- return i;
-
- if (type.getTypeTag() == ATypeTag.UNION) { // union
- unionList = ((AUnionType) type).getUnionList();
- for (int j = 0; j < unionList.size(); j++)
- if (unionList.get(j).getTypeTag() == ATypeTag.NULL) {
- isNull = true;
- break;
- }
- if (!isNull)
- return i;
- }
- }
- return -1;
- }
-
- private void parseOrderedList(AOrderedListType oltype, DataOutput out) throws IOException, AsterixException {
-
- ArrayBackedValueStorage itemBuffer = getTempBuffer();
- OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
-
- IAType itemType = null;
- if (oltype != null)
- itemType = oltype.getItemType();
- orderedListBuilder.reset(oltype);
-
- Token token = null;
- boolean inList = true;
- boolean expectingListItem = false;
- boolean first = true;
- do {
- token = nextToken();
- if (token.kind == AdmLexerConstants.END_ORDERED_LIST) {
- if (expectingListItem) {
- throw new AsterixException("Found END_COLLECTION while expecting a list item.");
- }
- inList = false;
- } else if (token.kind == AdmLexerConstants.COMMA) {
- if (first) {
- throw new AsterixException("Found COMMA before any list item.");
- }
- if (expectingListItem) {
- throw new AsterixException("Found COMMA while expecting a list item.");
- }
- expectingListItem = true;
- } else {
- expectingListItem = false;
- itemBuffer.reset();
-
- admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
- orderedListBuilder.addItem(itemBuffer);
- }
- first = false;
- } while (inList);
- orderedListBuilder.write(out, true);
- returnOrderedListBuilder(orderedListBuilder);
- returnTempBuffer(itemBuffer);
- }
-
- private void parseUnorderedList(AUnorderedListType uoltype, DataOutput out) throws IOException,
- AsterixException {
-
- ArrayBackedValueStorage itemBuffer = getTempBuffer();
- UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
-
- IAType itemType = null;
-
- if (uoltype != null)
- itemType = uoltype.getItemType();
- unorderedListBuilder.reset(uoltype);
-
- Token token = null;
- boolean inList = true;
- boolean expectingListItem = false;
- boolean first = true;
- do {
- token = nextToken();
- if (token.kind == AdmLexerConstants.END_UNORDERED_LIST) {
- if (expectingListItem) {
- throw new AsterixException("Found END_COLLECTION while expecting a list item.");
- }
- inList = false;
- } else if (token.kind == AdmLexerConstants.COMMA) {
- if (first) {
- throw new AsterixException("Found COMMA before any list item.");
- }
- if (expectingListItem) {
- throw new AsterixException("Found COMMA while expecting a list item.");
- }
- expectingListItem = true;
- } else {
- expectingListItem = false;
- itemBuffer.reset();
- admFromLexerStream(token, itemType, itemBuffer.getDataOutput(), false);
- unorderedListBuilder.addItem(itemBuffer);
- }
- first = false;
- } while (inList);
- unorderedListBuilder.write(out, true);
- returnUnorderedListBuilder(unorderedListBuilder);
- returnTempBuffer(itemBuffer);
- }
-
- private Token nextToken() throws AsterixException {
- try {
- return admLexer.next();
- } catch (ParseException pe) {
- throw new AsterixException(pe);
- }
- }
-
- private IARecordBuilder getRecordBuilder() {
- RecordBuilder recBuilder = (RecordBuilder) recordBuilderPool.poll();
- if (recBuilder != null)
- return recBuilder;
- else
- return new RecordBuilder();
- }
-
- private void returnRecordBuilder(IARecordBuilder recBuilder) {
- this.recordBuilderPool.add(recBuilder);
- }
-
- private IAsterixListBuilder getOrderedListBuilder() {
- OrderedListBuilder orderedListBuilder = (OrderedListBuilder) orderedListBuilderPool.poll();
- if (orderedListBuilder != null)
- return orderedListBuilder;
- else
- return new OrderedListBuilder();
- }
-
- private void returnOrderedListBuilder(IAsterixListBuilder orderedListBuilder) {
- this.orderedListBuilderPool.add(orderedListBuilder);
- }
-
- private IAsterixListBuilder getUnorderedListBuilder() {
- UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) unorderedListBuilderPool.poll();
- if (unorderedListBuilder != null)
- return unorderedListBuilder;
- else
- return new UnorderedListBuilder();
- }
-
- private void returnUnorderedListBuilder(IAsterixListBuilder unorderedListBuilder) {
- this.unorderedListBuilderPool.add(unorderedListBuilder);
- }
-
- private ArrayBackedValueStorage getTempBuffer() {
- ArrayBackedValueStorage tmpBaaos = baaosPool.poll();
- if (tmpBaaos != null) {
- return tmpBaaos;
- } else {
- return new ArrayBackedValueStorage();
- }
- }
-
- private void returnTempBuffer(ArrayBackedValueStorage tempBaaos) {
- baaosPool.add(tempBaaos);
- }
- };
+ return new AdmTupleParser(ctx, recType);
}
+
}
\ No newline at end of file
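
The hunk above replaces the inline anonymous tuple parser with a single instantiation of the new AdmTupleParser class. The enclosing factory class is outside this hunk, so the sketch below only illustrates the intended shape; the factory name and the ITupleParserFactory interface it implements are assumptions rather than code taken from this patch.

    import edu.uci.ics.asterix.om.types.ARecordType;
    import edu.uci.ics.asterix.runtime.operators.file.AdmTupleParser;
    import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
    import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
    import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;

    // Sketch only: the real factory lives outside this hunk; its name and the
    // exact createTupleParser signature are assumed for illustration.
    public class AdmTupleParserFactorySketch implements ITupleParserFactory {
        private static final long serialVersionUID = 1L;
        private final ARecordType recType;

        public AdmTupleParserFactorySketch(ARecordType recType) {
            this.recType = recType;
        }

        @Override
        public ITupleParser createTupleParser(IHyracksTaskContext ctx) {
            // The inline parser deleted above is replaced by the reusable
            // AdmTupleParser, which delegates format handling to a data parser.
            return new AdmTupleParser(ctx, recType);
        }
    }
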
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java
index 65223c1..9be4c00 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/AdmTupleParser.java
@@ -1,1046 +1,35 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.runtime.operators.file;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.util.ArrayDeque;
-import java.util.BitSet;
-import java.util.List;
-import java.util.Queue;
-
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexer;
-import edu.uci.ics.asterix.adm.parser.nontagged.AdmLexerConstants;
-import edu.uci.ics.asterix.adm.parser.nontagged.ParseException;
-import edu.uci.ics.asterix.adm.parser.nontagged.Token;
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.IAsterixListBuilder;
-import edu.uci.ics.asterix.builders.OrderedListBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.builders.UnorderedListBuilder;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ACircleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADateTimeSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ADurationSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ALineSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APoint3DSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APointSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.APolygonSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARectangleSerializerDeserializer;
-import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ATimeSerializerDeserializer;
-import edu.uci.ics.asterix.om.base.ABoolean;
-import edu.uci.ics.asterix.om.base.ANull;
-import edu.uci.ics.asterix.om.types.AOrderedListType;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.AUnionType;
-import edu.uci.ics.asterix.om.types.AUnorderedListType;
-import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.NonTaggedFormatUtil;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-public class AdmTupleParser extends AbstractTupleParser {
+/**
+ * An extension of AbstractTupleParser that parses tuples from
+ * ADM-formatted data, delegating the actual parsing to an ADMDataParser.
+ */
+public class AdmTupleParser extends AbstractTupleParser {
- protected AdmLexer admLexer;
- protected ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
- protected DataOutput dos = tb.getDataOutput();
- protected final FrameTupleAppender appender;
- protected final ByteBuffer frame;
- protected final ARecordType recType;
+ public AdmTupleParser(IHyracksTaskContext ctx, ARecordType recType) {
+ super(ctx, recType);
+ }
- private int nullableFieldId = 0;
-
- private Queue<ArrayBackedValueStorage> baaosPool = new ArrayDeque<ArrayBackedValueStorage>();
- private Queue<IARecordBuilder> recordBuilderPool = new ArrayDeque<IARecordBuilder>();
- private Queue<IAsterixListBuilder> orderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
- private Queue<IAsterixListBuilder> unorderedListBuilderPool = new ArrayDeque<IAsterixListBuilder>();
-
- private String mismatchErrorMessage = "Mismatch Type, expecting a value of type ";
-
-
- public AdmTupleParser(IHyracksTaskContext ctx, ARecordType recType) {
- appender = new FrameTupleAppender(ctx.getFrameSize());
- frame = ctx.allocateFrame();
- this.recType = recType;
-
- }
-
- @Override
- public void parse(InputStream in, IFrameWriter writer)
- throws HyracksDataException {
- admLexer = new AdmLexer(in);
- appender.reset(frame, true);
- int tupleNum = 0;
- try {
- while (true) {
- tb.reset();
- if (!parseAdmInstance(recType, true, dos)) {
- break;
- }
- tb.addFieldEndOffset();
- if (!appender.append(tb.getFieldEndOffsets(),
- tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(frame, writer);
- appender.reset(frame, true);
- if (!appender.append(tb.getFieldEndOffsets(),
- tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- tupleNum++;
- }
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(frame, writer);
- }
- } catch (AsterixException ae) {
- throw new HyracksDataException(ae);
- } catch (IOException ioe) {
- throw new HyracksDataException(ioe);
- }
- }
-
- protected boolean parseAdmInstance(IAType objectType, Boolean datasetRec,
- DataOutput out) throws AsterixException, IOException {
- Token token;
- try {
- token = admLexer.next();
- } catch (ParseException pe) {
- throw new AsterixException(pe);
- }
- if (token.kind == AdmLexerConstants.EOF) {
- return false;
- } else {
- admFromLexerStream(token, objectType, out, datasetRec);
- return true;
- }
- }
-
- private void admFromLexerStream(Token token, IAType objectType,
- DataOutput out, Boolean datasetRec) throws AsterixException,
- IOException {
-
- switch (token.kind) {
- case AdmLexerConstants.NULL_LITERAL: {
- if (checkType(ATypeTag.NULL, objectType, out)) {
- nullSerde.serialize(ANull.NULL, out);
- } else
- throw new AsterixException(" This field can not be null ");
- break;
- }
- case AdmLexerConstants.TRUE_LITERAL: {
- if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
- booleanSerde.serialize(ABoolean.TRUE, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.BOOLEAN_CONS: {
- parseConstructor(ATypeTag.BOOLEAN, objectType, out);
- break;
- }
- case AdmLexerConstants.FALSE_LITERAL: {
- if (checkType(ATypeTag.BOOLEAN, objectType, out)) {
- booleanSerde.serialize(ABoolean.FALSE, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.DOUBLE_LITERAL: {
- if (checkType(ATypeTag.DOUBLE, objectType, out)) {
- aDouble.setValue(Double.parseDouble(token.image));
- doubleSerde.serialize(aDouble, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.DOUBLE_CONS: {
- parseConstructor(ATypeTag.DOUBLE, objectType, out);
- break;
- }
- case AdmLexerConstants.FLOAT_LITERAL: {
- if (checkType(ATypeTag.FLOAT, objectType, out)) {
- aFloat.setValue(Float.parseFloat(token.image));
- floatSerde.serialize(aFloat, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.FLOAT_CONS: {
- parseConstructor(ATypeTag.FLOAT, objectType, out);
- break;
- }
- case AdmLexerConstants.INT8_LITERAL: {
- if (checkType(ATypeTag.INT8, objectType, out)) {
- parseInt8(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT8_CONS: {
- parseConstructor(ATypeTag.INT8, objectType, out);
- break;
- }
- case AdmLexerConstants.INT16_LITERAL: {
- if (checkType(ATypeTag.INT16, objectType, out)) {
- parseInt16(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT16_CONS: {
- parseConstructor(ATypeTag.INT16, objectType, out);
- break;
- }
- case AdmLexerConstants.INT_LITERAL:
- case AdmLexerConstants.INT32_LITERAL: {
- if (checkType(ATypeTag.INT32, objectType, out)) {
- parseInt32(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT32_CONS: {
- parseConstructor(ATypeTag.INT32, objectType, out);
- break;
- }
- case AdmLexerConstants.INT64_LITERAL: {
- if (checkType(ATypeTag.INT64, objectType, out)) {
- parseInt64(token.image, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.INT64_CONS: {
- parseConstructor(ATypeTag.INT64, objectType, out);
- break;
- }
- case AdmLexerConstants.STRING_LITERAL: {
- if (checkType(ATypeTag.STRING, objectType, out)) {
- aString.setValue(token.image.substring(1,
- token.image.length() - 1));
- stringSerde.serialize(aString, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- break;
- }
- case AdmLexerConstants.STRING_CONS: {
- parseConstructor(ATypeTag.STRING, objectType, out);
- break;
- }
- case AdmLexerConstants.DATE_CONS: {
- parseConstructor(ATypeTag.DATE, objectType, out);
- break;
- }
- case AdmLexerConstants.TIME_CONS: {
- parseConstructor(ATypeTag.TIME, objectType, out);
- break;
- }
- case AdmLexerConstants.DATETIME_CONS: {
- parseConstructor(ATypeTag.DATETIME, objectType, out);
- break;
- }
- case AdmLexerConstants.DURATION_CONS: {
- parseConstructor(ATypeTag.DURATION, objectType, out);
- break;
- }
- case AdmLexerConstants.POINT_CONS: {
- parseConstructor(ATypeTag.POINT, objectType, out);
- break;
- }
- case AdmLexerConstants.POINT3D_CONS: {
- parseConstructor(ATypeTag.POINT3D, objectType, out);
- break;
- }
- case AdmLexerConstants.CIRCLE_CONS: {
- parseConstructor(ATypeTag.CIRCLE, objectType, out);
- break;
- }
- case AdmLexerConstants.RECTANGLE_CONS: {
- parseConstructor(ATypeTag.RECTANGLE, objectType, out);
- break;
- }
- case AdmLexerConstants.LINE_CONS: {
- parseConstructor(ATypeTag.LINE, objectType, out);
- break;
- }
- case AdmLexerConstants.POLYGON_CONS: {
- parseConstructor(ATypeTag.POLYGON, objectType, out);
- break;
- }
- case AdmLexerConstants.START_UNORDERED_LIST: {
- if (checkType(ATypeTag.UNORDEREDLIST, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.UNORDEREDLIST);
- parseUnorderedList((AUnorderedListType) objectType, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeTag());
- break;
- }
-
- case AdmLexerConstants.START_ORDERED_LIST: {
- if (checkType(ATypeTag.ORDEREDLIST, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.ORDEREDLIST);
- parseOrderedList((AOrderedListType) objectType, out);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeTag());
- break;
- }
- case AdmLexerConstants.START_RECORD: {
- if (checkType(ATypeTag.RECORD, objectType, out)) {
- objectType = getComplexType(objectType, ATypeTag.RECORD);
- parseRecord((ARecordType) objectType, out, datasetRec);
- } else
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeTag());
- break;
- }
- case AdmLexerConstants.EOF: {
- break;
- }
- default: {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind) + ".");
- }
- }
- }
-
- private void parseDatetime(String datetime, DataOutput out)
- throws AsterixException, IOException {
- try {
- ADateTimeSerializerDeserializer.parse(datetime, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseDuration(String duration, DataOutput out)
- throws AsterixException {
- try {
- ADurationSerializerDeserializer.parse(duration, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
-
- }
-
- private IAType getComplexType(IAType aObjectType, ATypeTag tag) {
-
- if (aObjectType == null) {
- return null;
- }
-
- if (aObjectType.getTypeTag() == tag)
- return aObjectType;
-
- if (aObjectType.getTypeTag() == ATypeTag.UNION) {
- unionList = ((AUnionType) aObjectType).getUnionList();
- for (int i = 0; i < unionList.size(); i++)
- if (unionList.get(i).getTypeTag() == tag) {
- return unionList.get(i);
- }
- }
- return null; // wont get here
- }
-
- List<IAType> unionList;
-
- private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType,
- DataOutput out) throws IOException {
-
- if (aObjectType == null)
- return true;
-
- if (aObjectType.getTypeTag() != ATypeTag.UNION) {
- if (expectedTypeTag == aObjectType.getTypeTag())
- return true;
- } else { // union
- unionList = ((AUnionType) aObjectType).getUnionList();
- for (int i = 0; i < unionList.size(); i++)
- if (unionList.get(i).getTypeTag() == expectedTypeTag)
- return true;
- }
- return false;
- }
-
- private void parseRecord(ARecordType recType, DataOutput out,
- Boolean datasetRec) throws IOException, AsterixException {
-
- ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
- ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
- IARecordBuilder recBuilder = getRecordBuilder();
-
- // Boolean[] nulls = null;
- BitSet nulls = null;
- if (datasetRec) {
- if (recType != null) {
- nulls = new BitSet(recType.getFieldNames().length);
- recBuilder.reset(recType);
- } else
- recBuilder.reset(null);
- } else if (recType != null) {
- nulls = new BitSet(recType.getFieldNames().length);
- recBuilder.reset(recType);
- } else
- recBuilder.reset(null);
-
- recBuilder.init();
- Token token = null;
- boolean inRecord = true;
- boolean expectingRecordField = false;
- boolean first = true;
-
- Boolean openRecordField = false;
- int fieldId = 0;
- IAType fieldType = null;
- do {
- token = nextToken();
- switch (token.kind) {
- case AdmLexerConstants.END_RECORD: {
- if (expectingRecordField) {
- throw new AsterixException(
- "Found END_RECORD while expecting a record field.");
- }
- inRecord = false;
- break;
- }
- case AdmLexerConstants.STRING_LITERAL: {
- // we've read the name of the field
- // now read the content
- fieldNameBuffer.reset();
- fieldValueBuffer.reset();
- expectingRecordField = false;
-
- if (recType != null) {
- String fldName = token.image.substring(1,
- token.image.length() - 1);
- fieldId = recBuilder.getFieldId(fldName);
- if (fieldId < 0 && !recType.isOpen()) {
- throw new AsterixException(
- "This record is closed, you can not add extra fields !!");
- } else if (fieldId < 0 && recType.isOpen()) {
- aStringFieldName.setValue(token.image.substring(1,
- token.image.length() - 1));
- stringSerde.serialize(aStringFieldName,
- fieldNameBuffer.getDataOutput());
- openRecordField = true;
- fieldType = null;
- } else {
- // a closed field
- nulls.set(fieldId);
- fieldType = recType.getFieldTypes()[fieldId];
- openRecordField = false;
- }
- } else {
- aStringFieldName.setValue(token.image.substring(1,
- token.image.length() - 1));
- stringSerde.serialize(aStringFieldName,
- fieldNameBuffer.getDataOutput());
- openRecordField = true;
- fieldType = null;
- }
-
- token = nextToken();
- if (token.kind != AdmLexerConstants.COLON) {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind)
- + " while expecting \":\".");
- }
-
- token = nextToken();
- this.admFromLexerStream(token, fieldType,
- fieldValueBuffer.getDataOutput(), false);
- if (openRecordField) {
- if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL
- .serialize())
- recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
- } else if (recType.getFieldTypes()[fieldId].getTypeTag() == ATypeTag.UNION) {
- if (NonTaggedFormatUtil
- .isOptionalField((AUnionType) recType
- .getFieldTypes()[fieldId])) {
- if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL
- .serialize()) {
- recBuilder.addField(fieldId, fieldValueBuffer);
- }
- }
- } else {
- recBuilder.addField(fieldId, fieldValueBuffer);
- }
-
- break;
- }
- case AdmLexerConstants.COMMA: {
- if (first) {
- throw new AsterixException(
- "Found COMMA before any record field.");
- }
- if (expectingRecordField) {
- throw new AsterixException(
- "Found COMMA while expecting a record field.");
- }
- expectingRecordField = true;
- break;
- }
- default: {
- throw new AsterixException("Unexpected ADM token kind: "
- + admLexer.tokenKindToString(token.kind)
- + " while parsing record fields.");
- }
- }
- first = false;
- } while (inRecord);
-
- if (recType != null) {
- nullableFieldId = checkNullConstraints(recType, nulls);
- if (nullableFieldId != -1)
- throw new AsterixException("Field " + nullableFieldId
- + " can not be null");
- }
- recBuilder.write(out, true);
- returnRecordBuilder(recBuilder);
- returnTempBuffer(fieldNameBuffer);
- returnTempBuffer(fieldValueBuffer);
- }
-
- private int checkNullConstraints(ARecordType recType, BitSet nulls) {
-
- boolean isNull = false;
- for (int i = 0; i < recType.getFieldTypes().length; i++)
- if (nulls.get(i) == false) {
- IAType type = recType.getFieldTypes()[i];
- if (type.getTypeTag() != ATypeTag.NULL
- && type.getTypeTag() != ATypeTag.UNION)
- return i;
-
- if (type.getTypeTag() == ATypeTag.UNION) { // union
- unionList = ((AUnionType) type).getUnionList();
- for (int j = 0; j < unionList.size(); j++)
- if (unionList.get(j).getTypeTag() == ATypeTag.NULL) {
- isNull = true;
- break;
- }
- if (!isNull)
- return i;
- }
- }
- return -1;
- }
-
- private void parseOrderedList(AOrderedListType oltype, DataOutput out)
- throws IOException, AsterixException {
-
- ArrayBackedValueStorage itemBuffer = getTempBuffer();
- OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
-
- IAType itemType = null;
- if (oltype != null)
- itemType = oltype.getItemType();
- orderedListBuilder.reset(oltype);
-
- Token token = null;
- boolean inList = true;
- boolean expectingListItem = false;
- boolean first = true;
- do {
- token = nextToken();
- if (token.kind == AdmLexerConstants.END_ORDERED_LIST) {
- if (expectingListItem) {
- throw new AsterixException(
- "Found END_COLLECTION while expecting a list item.");
- }
- inList = false;
- } else if (token.kind == AdmLexerConstants.COMMA) {
- if (first) {
- throw new AsterixException(
- "Found COMMA before any list item.");
- }
- if (expectingListItem) {
- throw new AsterixException(
- "Found COMMA while expecting a list item.");
- }
- expectingListItem = true;
- } else {
- expectingListItem = false;
- itemBuffer.reset();
-
- admFromLexerStream(token, itemType, itemBuffer.getDataOutput(),
- false);
- orderedListBuilder.addItem(itemBuffer);
- }
- first = false;
- } while (inList);
- orderedListBuilder.write(out, true);
- returnOrderedListBuilder(orderedListBuilder);
- returnTempBuffer(itemBuffer);
- }
-
- private void parseUnorderedList(AUnorderedListType uoltype, DataOutput out)
- throws IOException, AsterixException {
-
- ArrayBackedValueStorage itemBuffer = getTempBuffer();
- UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
-
- IAType itemType = null;
-
- if (uoltype != null)
- itemType = uoltype.getItemType();
- unorderedListBuilder.reset(uoltype);
-
- Token token = null;
- boolean inList = true;
- boolean expectingListItem = false;
- boolean first = true;
- do {
- token = nextToken();
- if (token.kind == AdmLexerConstants.END_UNORDERED_LIST) {
- if (expectingListItem) {
- throw new AsterixException(
- "Found END_COLLECTION while expecting a list item.");
- }
- inList = false;
- } else if (token.kind == AdmLexerConstants.COMMA) {
- if (first) {
- throw new AsterixException(
- "Found COMMA before any list item.");
- }
- if (expectingListItem) {
- throw new AsterixException(
- "Found COMMA while expecting a list item.");
- }
- expectingListItem = true;
- } else {
- expectingListItem = false;
- itemBuffer.reset();
- admFromLexerStream(token, itemType, itemBuffer.getDataOutput(),
- false);
- unorderedListBuilder.addItem(itemBuffer);
- }
- first = false;
- } while (inList);
- unorderedListBuilder.write(out, true);
- returnUnorderedListBuilder(unorderedListBuilder);
- returnTempBuffer(itemBuffer);
- }
-
- private Token nextToken() throws AsterixException {
- try {
- return admLexer.next();
- } catch (ParseException pe) {
- throw new AsterixException(pe);
- }
- }
-
- private IARecordBuilder getRecordBuilder() {
- RecordBuilder recBuilder = (RecordBuilder) recordBuilderPool.poll();
- if (recBuilder != null)
- return recBuilder;
- else
- return new RecordBuilder();
- }
-
- private void returnRecordBuilder(IARecordBuilder recBuilder) {
- this.recordBuilderPool.add(recBuilder);
- }
-
- private IAsterixListBuilder getOrderedListBuilder() {
- OrderedListBuilder orderedListBuilder = (OrderedListBuilder) orderedListBuilderPool
- .poll();
- if (orderedListBuilder != null)
- return orderedListBuilder;
- else
- return new OrderedListBuilder();
- }
-
- private void returnOrderedListBuilder(
- IAsterixListBuilder orderedListBuilder) {
- this.orderedListBuilderPool.add(orderedListBuilder);
- }
-
- private IAsterixListBuilder getUnorderedListBuilder() {
- UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) unorderedListBuilderPool
- .poll();
- if (unorderedListBuilder != null)
- return unorderedListBuilder;
- else
- return new UnorderedListBuilder();
- }
-
- private void returnUnorderedListBuilder(
- IAsterixListBuilder unorderedListBuilder) {
- this.unorderedListBuilderPool.add(unorderedListBuilder);
- }
-
- private ArrayBackedValueStorage getTempBuffer() {
- ArrayBackedValueStorage tmpBaaos = baaosPool.poll();
- if (tmpBaaos != null) {
- return tmpBaaos;
- } else {
- return new ArrayBackedValueStorage();
- }
- }
-
- private void returnTempBuffer(ArrayBackedValueStorage tempBaaos) {
- baaosPool.add(tempBaaos);
- }
-
- private void parseConstructor(ATypeTag typeTag, IAType objectType,
- DataOutput out) throws AsterixException {
- try {
- Token token = admLexer.next();
- if (token.kind == AdmLexerConstants.CONSTRUCTOR_OPEN) {
- if (checkType(typeTag, objectType, out)) {
- token = admLexer.next();
- if (token.kind == AdmLexerConstants.STRING_LITERAL) {
- switch (typeTag) {
- case BOOLEAN:
- parseBoolean(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case INT8:
- parseInt8(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case INT16:
- parseInt16(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case INT32:
- parseInt32(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case INT64:
- parseInt64(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case FLOAT:
- aFloat.setValue(Float.parseFloat(token.image
- .substring(1, token.image.length() - 1)));
- floatSerde.serialize(aFloat, out);
- break;
- case DOUBLE:
- aDouble.setValue(Double.parseDouble(token.image
- .substring(1, token.image.length() - 1)));
- doubleSerde.serialize(aDouble, out);
- break;
- case STRING:
- aString.setValue(token.image.substring(1,
- token.image.length() - 1));
- stringSerde.serialize(aString, out);
- break;
- case TIME:
- parseTime(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case DATE:
- parseDate(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case DATETIME:
- parseDatetime(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case DURATION:
- parseDuration(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case POINT:
- parsePoint(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case POINT3D:
- parsePoint3d(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case CIRCLE:
- parseCircle(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case RECTANGLE:
- parseRectangle(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case LINE:
- parseLine(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
- case POLYGON:
- parsePolygon(
- token.image.substring(1,
- token.image.length() - 1), out);
- break;
-
- }
- token = admLexer.next();
- if (token.kind == AdmLexerConstants.CONSTRUCTOR_CLOSE)
- return;
- }
- }
- }
- } catch (Exception e) {
- throw new AsterixException(e);
- }
- throw new AsterixException(mismatchErrorMessage
- + objectType.getTypeName());
- }
-
- private void parseBoolean(String bool, DataOutput out)
- throws AsterixException {
- String errorMessage = "This can not be an instance of boolean";
- try {
- if (bool.equals("true"))
- booleanSerde.serialize(ABoolean.TRUE, out);
- else if (bool.equals("false"))
- booleanSerde.serialize(ABoolean.FALSE, out);
- else
- throw new AsterixException(errorMessage);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt8(String int8, DataOutput out) throws AsterixException {
- String errorMessage = "This can not be an instance of int8";
- try {
- boolean positive = true;
- byte value = 0;
- int offset = 0;
-
- if (int8.charAt(offset) == '+')
- offset++;
- else if (int8.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int8.length(); offset++) {
- if (int8.charAt(offset) >= '0' && int8.charAt(offset) <= '9')
- value = (byte) (value * 10 + int8.charAt(offset) - '0');
- else if (int8.charAt(offset) == 'i'
- && int8.charAt(offset + 1) == '8'
- && offset + 2 == int8.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
- aInt8.setValue(value);
- int8Serde.serialize(aInt8, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt16(String int16, DataOutput out)
- throws AsterixException {
- String errorMessage = "This can not be an instance of int16";
- try {
- boolean positive = true;
- short value = 0;
- int offset = 0;
-
- if (int16.charAt(offset) == '+')
- offset++;
- else if (int16.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int16.length(); offset++) {
- if (int16.charAt(offset) >= '0' && int16.charAt(offset) <= '9')
- value = (short) (value * 10 + int16.charAt(offset) - '0');
- else if (int16.charAt(offset) == 'i'
- && int16.charAt(offset + 1) == '1'
- && int16.charAt(offset + 2) == '6'
- && offset + 3 == int16.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
- aInt16.setValue(value);
- int16Serde.serialize(aInt16, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt32(String int32, DataOutput out)
- throws AsterixException {
-
- String errorMessage = "This can not be an instance of int32";
- try {
- boolean positive = true;
- int value = 0;
- int offset = 0;
-
- if (int32.charAt(offset) == '+')
- offset++;
- else if (int32.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int32.length(); offset++) {
- if (int32.charAt(offset) >= '0' && int32.charAt(offset) <= '9')
- value = (value * 10 + int32.charAt(offset) - '0');
- else if (int32.charAt(offset) == 'i'
- && int32.charAt(offset + 1) == '3'
- && int32.charAt(offset + 2) == '2'
- && offset + 3 == int32.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
-
- aInt32.setValue(value);
- int32Serde.serialize(aInt32, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parseInt64(String int64, DataOutput out)
- throws AsterixException {
- String errorMessage = "This can not be an instance of int64";
- try {
- boolean positive = true;
- long value = 0;
- int offset = 0;
-
- if (int64.charAt(offset) == '+')
- offset++;
- else if (int64.charAt(offset) == '-') {
- offset++;
- positive = false;
- }
- for (; offset < int64.length(); offset++) {
- if (int64.charAt(offset) >= '0' && int64.charAt(offset) <= '9')
- value = (value * 10 + int64.charAt(offset) - '0');
- else if (int64.charAt(offset) == 'i'
- && int64.charAt(offset + 1) == '6'
- && int64.charAt(offset + 2) == '4'
- && offset + 3 == int64.length())
- break;
- else
- throw new AsterixException(errorMessage);
- }
- if (value < 0)
- throw new AsterixException(errorMessage);
- if (value > 0 && !positive)
- value *= -1;
-
- aInt64.setValue(value);
- int64Serde.serialize(aInt64, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(errorMessage);
- }
- }
-
- private void parsePoint(String point, DataOutput out)
- throws AsterixException {
- try {
- APointSerializerDeserializer.parse(point, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parsePoint3d(String point3d, DataOutput out)
- throws AsterixException {
- try {
- APoint3DSerializerDeserializer.parse(point3d, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseCircle(String circle, DataOutput out)
- throws AsterixException {
- try {
- ACircleSerializerDeserializer.parse(circle, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseRectangle(String rectangle, DataOutput out)
- throws AsterixException {
- try {
- ARectangleSerializerDeserializer.parse(rectangle, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseLine(String line, DataOutput out) throws AsterixException {
- try {
- ALineSerializerDeserializer.parse(line, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parsePolygon(String polygon, DataOutput out)
- throws AsterixException, IOException {
- try {
- APolygonSerializerDeserializer.parse(polygon, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseTime(String time, DataOutput out) throws AsterixException {
- try {
- ATimeSerializerDeserializer.parse(time, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
-
- private void parseDate(String date, DataOutput out)
- throws AsterixException, IOException {
- try {
- ADateSerializerDeserializer.parse(date, out);
- } catch (HyracksDataException e) {
- throw new AsterixException(e);
- }
- }
+ @Override
+ public IDataParser getDataParser() {
+ return new ADMDataParser();
+ }
}
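
With this rewrite AdmTupleParser carries no parsing logic of its own: it inherits the frame-building loop from AbstractTupleParser and only supplies the ADM-specific IDataParser. The sketch below drives that delegated parser directly; the IDataParser contract (initialize() plus a boolean-returning parse()) is inferred from the DelimitedDataParser further down, and passing a null record type for fully open parsing is an assumption carried over from the deleted code above.

    package edu.uci.ics.asterix.runtime.operators.file;

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;

    import edu.uci.ics.asterix.om.types.ARecordType;
    import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;

    // Sketch only: placed in the same package so IDataParser/ADMDataParser
    // resolve without imports; the null record type means "untyped" here.
    public class AdmParseSketch {
        public static void main(String[] args) throws Exception {
            ARecordType recType = null; // no schema: parse as open ADM
            InputStream in = new ByteArrayInputStream(
                    "{ \"id\": 1, \"name\": \"alice\" }".getBytes("UTF-8"));

            IDataParser parser = new ADMDataParser();
            parser.initialize(in, recType, true);

            ArrayBackedValueStorage record = new ArrayBackedValueStorage();
            while (true) {
                record.reset();
                if (!parser.parse(record.getDataOutput())) {
                    break; // end of input
                }
                // each successful parse() leaves one serialized ADM record in 'record'
            }
        }
    }
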
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataParser.java
new file mode 100644
index 0000000..c9560fe
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataParser.java
@@ -0,0 +1,326 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.Arrays;
+
+import edu.uci.ics.asterix.builders.IARecordBuilder;
+import edu.uci.ics.asterix.builders.RecordBuilder;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.base.AMutableString;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+
+public class DelimitedDataParser extends AbstractDataParser implements IDataParser {
+
+ protected final IValueParserFactory[] valueParserFactories;
+ protected final char fieldDelimiter;
+ protected final ARecordType recordType;
+
+ private IARecordBuilder recBuilder;
+ private ArrayBackedValueStorage fieldValueBuffer;
+ private DataOutput fieldValueBufferOutput;
+ private IValueParser[] valueParsers;
+ private FieldCursor cursor;
+ private byte[] fieldTypeTags;
+ private int[] fldIds;
+ private ArrayBackedValueStorage[] nameBuffers;
+
+ public DelimitedDataParser(ARecordType recordType, IValueParserFactory[] valueParserFactories, char fieldDelimiter) {
+ this.recordType = recordType;
+ this.valueParserFactories = valueParserFactories;
+ this.fieldDelimiter = fieldDelimiter;
+ }
+
+ @Override
+ public void initialize(InputStream in, ARecordType recordType, boolean datasetRec) throws AsterixException,
+ IOException {
+
+ valueParsers = new IValueParser[valueParserFactories.length];
+ for (int i = 0; i < valueParserFactories.length; ++i) {
+ valueParsers[i] = valueParserFactories[i].createValueParser();
+ }
+
+ fieldValueBuffer = new ArrayBackedValueStorage();
+ fieldValueBufferOutput = fieldValueBuffer.getDataOutput();
+ recBuilder = new RecordBuilder();
+ recBuilder.reset(recordType);
+ recBuilder.init();
+
+ int n = recordType.getFieldNames().length;
+ fieldTypeTags = new byte[n];
+ for (int i = 0; i < n; i++) {
+ ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
+ fieldTypeTags[i] = tag.serialize();
+ }
+
+ fldIds = new int[n];
+ nameBuffers = new ArrayBackedValueStorage[n];
+ AMutableString str = new AMutableString(null);
+ for (int i = 0; i < n; i++) {
+ String name = recordType.getFieldNames()[i];
+ fldIds[i] = recBuilder.getFieldId(name);
+ if (fldIds[i] < 0) {
+ if (!recordType.isOpen()) {
+ throw new HyracksDataException("Illegal field " + name + " in closed type " + recordType);
+ } else {
+ nameBuffers[i] = new ArrayBackedValueStorage();
+ fieldNameToBytes(name, str, nameBuffers[i]);
+ }
+ }
+ }
+
+ cursor = new FieldCursor(new InputStreamReader(in));
+
+ }
+
+ @Override
+ public boolean parse(DataOutput out) throws AsterixException, IOException {
+
+ if (cursor.nextRecord()) {
+ recBuilder.reset(recordType);
+ recBuilder.init();
+ for (int i = 0; i < valueParsers.length; ++i) {
+ if (!cursor.nextField()) {
+ break;
+ }
+ fieldValueBuffer.reset();
+ fieldValueBufferOutput.writeByte(fieldTypeTags[i]);
+ valueParsers[i]
+ .parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart, fieldValueBufferOutput);
+ if (fldIds[i] < 0) {
+ recBuilder.addField(nameBuffers[i], fieldValueBuffer);
+ } else {
+ recBuilder.addField(fldIds[i], fieldValueBuffer);
+ }
+ }
+ recBuilder.write(out, true);
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ protected void fieldNameToBytes(String fieldName, AMutableString str, ArrayBackedValueStorage buffer)
+ throws HyracksDataException {
+ buffer.reset();
+ DataOutput out = buffer.getDataOutput();
+ str.setValue(fieldName);
+ try {
+ stringSerde.serialize(str, out);
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ protected enum State {
+ INIT,
+ IN_RECORD,
+ EOR,
+ CR,
+ EOF
+ }
+
+ protected class FieldCursor {
+ private static final int INITIAL_BUFFER_SIZE = 4096;
+ private static final int INCREMENT = 4096;
+
+ private final Reader in;
+
+ private char[] buffer;
+ private int start;
+ private int end;
+ private State state;
+
+ private int fStart;
+ private int fEnd;
+
+ public FieldCursor(Reader in) {
+ this.in = in;
+ buffer = new char[INITIAL_BUFFER_SIZE];
+ start = 0;
+ end = 0;
+ state = State.INIT;
+ }
+
+ public boolean nextRecord() throws IOException {
+ while (true) {
+ switch (state) {
+ case INIT:
+ boolean eof = !readMore();
+ if (eof) {
+ state = State.EOF;
+ return false;
+ } else {
+ state = State.IN_RECORD;
+ return true;
+ }
+
+ case IN_RECORD:
+ int p = start;
+ while (true) {
+ if (p >= end) {
+ int s = start;
+ eof = !readMore();
+ if (eof) {
+ state = State.EOF;
+ return start < end;
+ }
+ p -= (s - start);
+ }
+ char ch = buffer[p];
+ if (ch == '\n') {
+ start = p + 1;
+ state = State.EOR;
+ break;
+ } else if (ch == '\r') {
+ start = p + 1;
+ state = State.CR;
+ break;
+ }
+ ++p;
+ }
+ break;
+
+ case CR:
+ if (start >= end) {
+ eof = !readMore();
+ if (eof) {
+ state = State.EOF;
+ return false;
+ }
+ }
+ char ch = buffer[start];
+ if (ch == '\n') {
+ ++start;
+ state = State.EOR;
+ } else {
+ state = State.IN_RECORD;
+ return true;
+ }
+
+ case EOR:
+ if (start >= end) {
+ eof = !readMore();
+ if (eof) {
+ state = State.EOF;
+ return false;
+ }
+ }
+ state = State.IN_RECORD;
+ return start < end;
+
+ case EOF:
+ return false;
+ }
+ }
+ }
+
+ public boolean nextField() throws IOException {
+ switch (state) {
+ case INIT:
+ case EOR:
+ case EOF:
+ case CR:
+ return false;
+
+ case IN_RECORD:
+ boolean eof;
+ int p = start;
+ while (true) {
+ if (p >= end) {
+ int s = start;
+ eof = !readMore();
+ if (eof) {
+ state = State.EOF;
+ return true;
+ }
+ p -= (s - start);
+ }
+ char ch = buffer[p];
+ if (ch == fieldDelimiter) {
+ fStart = start;
+ fEnd = p;
+ start = p + 1;
+ return true;
+ } else if (ch == '\n') {
+ fStart = start;
+ fEnd = p;
+ start = p + 1;
+ state = State.EOR;
+ return true;
+ } else if (ch == '\r') {
+ fStart = start;
+ fEnd = p;
+ start = p + 1;
+ state = State.CR;
+ return true;
+ }
+ ++p;
+ }
+ }
+ throw new IllegalStateException();
+ }
+
+ protected boolean readMore() throws IOException {
+ if (start > 0) {
+ System.arraycopy(buffer, start, buffer, 0, end - start);
+ }
+ end -= start;
+ start = 0;
+
+ if (end == buffer.length) {
+ buffer = Arrays.copyOf(buffer, buffer.length + INCREMENT);
+ }
+
+ int n = in.read(buffer, end, buffer.length - end);
+ if (n < 0) {
+ return false;
+ }
+ end += n;
+ return true;
+ }
+
+ public int getfStart() {
+ return fStart;
+ }
+
+ public void setfStart(int fStart) {
+ this.fStart = fStart;
+ }
+
+ public int getfEnd() {
+ return fEnd;
+ }
+
+ public void setfEnd(int fEnd) {
+ this.fEnd = fEnd;
+ }
+
+ public char[] getBuffer() {
+ return buffer;
+ }
+ }
+
+}
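Editor's note on the new parser: the FieldCursor above is a small state machine that scans a growable char buffer and reports each field as a [fStart, fEnd) span, treating the configured delimiter as a field break and '\n' or "\r\n" as a record break. The following self-contained sketch illustrates the same scanning idea outside of the Asterix classes; names are illustrative and, unlike FieldCursor, it does not treat a lone '\r' as a record break.

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;

// Illustrative only: prints the fields of each record of a delimited input,
// treating '|' as the field delimiter and '\n' / "\r\n" as record breaks.
public class FieldScanSketch {
    public static void main(String[] args) throws IOException {
        Reader in = new StringReader("1|alice\r\n2|bob\n");
        StringBuilder field = new StringBuilder();
        int record = 0;
        int c;
        while ((c = in.read()) != -1) {
            char ch = (char) c;
            if (ch == '|') {                 // field break
                System.out.println("record " + record + " field: " + field);
                field.setLength(0);
            } else if (ch == '\n') {         // record break ("\r\n" collapses here)
                System.out.println("record " + record + " field: " + field);
                field.setLength(0);
                record++;
            } else if (ch != '\r') {         // '\r' is consumed together with the following '\n'
                field.append(ch);
            }
        }
    }
}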
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java
index abd2ade..c029b64 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/DelimitedDataTupleParser.java
@@ -1,331 +1,40 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.runtime.operators.file;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.om.base.AMutableString;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+/**
+ * An extension of AbstractTupleParser that provides functionality for
+ * parsing delimited files.
+ */
public class DelimitedDataTupleParser extends AbstractTupleParser {
- protected final IValueParserFactory[] valueParserFactories;
- protected final char fieldDelimiter;
- protected final IHyracksTaskContext ctx;
- protected final ARecordType recType;
- protected ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
- protected DataOutput recDos = tb.getDataOutput();
- protected final FrameTupleAppender appender;
- protected final ByteBuffer frame;
-
+ private final DelimitedDataParser dataParser;
- public DelimitedDataTupleParser(IHyracksTaskContext ctx,
- ARecordType recType, IValueParserFactory[] valueParserFactories,
- char fieldDelimter) {
- this.valueParserFactories = valueParserFactories;
- this.fieldDelimiter = fieldDelimter;
- this.ctx = ctx;
- this.recType = recType;
- appender = new FrameTupleAppender(ctx.getFrameSize());
- frame = ctx.allocateFrame();
- }
+ public DelimitedDataTupleParser(IHyracksTaskContext ctx, ARecordType recType,
+ IValueParserFactory[] valueParserFactories, char fieldDelimter) {
+ super(ctx, recType);
+ dataParser = new DelimitedDataParser(recType, valueParserFactories, fieldDelimter);
+ }
- @Override
- public void parse(InputStream in, IFrameWriter writer)
- throws HyracksDataException {
- try {
- IValueParser[] valueParsers = new IValueParser[valueParserFactories.length];
- for (int i = 0; i < valueParserFactories.length; ++i) {
- valueParsers[i] = valueParserFactories[i].createValueParser();
- }
-
- appender.reset(frame, true);
-
-
- ArrayBackedValueStorage fieldValueBuffer = new ArrayBackedValueStorage();
- DataOutput fieldValueBufferOutput = fieldValueBuffer
- .getDataOutput();
- IARecordBuilder recBuilder = new RecordBuilder();
- recBuilder.reset(recType);
- recBuilder.init();
-
- int n = recType.getFieldNames().length;
- byte[] fieldTypeTags = new byte[n];
- for (int i = 0; i < n; i++) {
- ATypeTag tag = recType.getFieldTypes()[i].getTypeTag();
- fieldTypeTags[i] = tag.serialize();
- }
-
- int[] fldIds = new int[n];
- ArrayBackedValueStorage[] nameBuffers = new ArrayBackedValueStorage[n];
- AMutableString str = new AMutableString(null);
- for (int i = 0; i < n; i++) {
- String name = recType.getFieldNames()[i];
- fldIds[i] = recBuilder.getFieldId(name);
- if (fldIds[i] < 0) {
- if (!recType.isOpen()) {
- throw new HyracksDataException("Illegal field " + name
- + " in closed type " + recType);
- } else {
- nameBuffers[i] = new ArrayBackedValueStorage();
- fieldNameToBytes(name, str, nameBuffers[i]);
- }
- }
- }
-
- FieldCursor cursor = new FieldCursor(new InputStreamReader(in));
- while (cursor.nextRecord()) {
- tb.reset();
- recBuilder.reset(recType);
- recBuilder.init();
-
- for (int i = 0; i < valueParsers.length; ++i) {
- if (!cursor.nextField()) {
- break;
- }
- fieldValueBuffer.reset();
- fieldValueBufferOutput.writeByte(fieldTypeTags[i]);
- valueParsers[i]
- .parse(cursor.buffer, cursor.fStart, cursor.fEnd
- - cursor.fStart, fieldValueBufferOutput);
- if (fldIds[i] < 0) {
- recBuilder.addField(nameBuffers[i], fieldValueBuffer);
- } else {
- recBuilder.addField(fldIds[i], fieldValueBuffer);
- }
- }
- recBuilder.write(recDos, true);
- tb.addFieldEndOffset();
-
- if (!appender.append(tb.getFieldEndOffsets(),
- tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(frame, writer);
- appender.reset(frame, true);
- if (!appender.append(tb.getFieldEndOffsets(),
- tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- }
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(frame, writer);
- }
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
- }
-
- protected void fieldNameToBytes(String fieldName, AMutableString str,
- ArrayBackedValueStorage buffer) throws HyracksDataException {
- buffer.reset();
- DataOutput out = buffer.getDataOutput();
- str.setValue(fieldName);
- try {
- stringSerde.serialize(str, out);
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
- }
-
- protected enum State {
- INIT, IN_RECORD, EOR, CR, EOF
- }
-
- protected class FieldCursor {
- private static final int INITIAL_BUFFER_SIZE = 4096;
- private static final int INCREMENT = 4096;
-
- private final Reader in;
-
- private char[] buffer;
- private int start;
- private int end;
- private State state;
-
- private int fStart;
- private int fEnd;
-
- public FieldCursor(Reader in) {
- this.in = in;
- buffer = new char[INITIAL_BUFFER_SIZE];
- start = 0;
- end = 0;
- state = State.INIT;
- }
-
- public boolean nextRecord() throws IOException {
- while (true) {
- switch (state) {
- case INIT:
- boolean eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- } else {
- state = State.IN_RECORD;
- return true;
- }
-
- case IN_RECORD:
- int p = start;
- while (true) {
- if (p >= end) {
- int s = start;
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return start < end;
- }
- p -= (s - start);
- }
- char ch = buffer[p];
- if (ch == '\n') {
- start = p + 1;
- state = State.EOR;
- break;
- } else if (ch == '\r') {
- start = p + 1;
- state = State.CR;
- break;
- }
- ++p;
- }
- break;
-
- case CR:
- if (start >= end) {
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- }
- }
- char ch = buffer[start];
- if (ch == '\n') {
- ++start;
- state = State.EOR;
- } else {
- state = State.IN_RECORD;
- return true;
- }
-
- case EOR:
- if (start >= end) {
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- }
- }
- state = State.IN_RECORD;
- return start < end;
-
- case EOF:
- return false;
- }
- }
- }
-
- public boolean nextField() throws IOException {
- switch (state) {
- case INIT:
- case EOR:
- case EOF:
- case CR:
- return false;
-
- case IN_RECORD:
- boolean eof;
- int p = start;
- while (true) {
- if (p >= end) {
- int s = start;
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return true;
- }
- p -= (s - start);
- }
- char ch = buffer[p];
- if (ch == fieldDelimiter) {
- fStart = start;
- fEnd = p;
- start = p + 1;
- return true;
- } else if (ch == '\n') {
- fStart = start;
- fEnd = p;
- start = p + 1;
- state = State.EOR;
- return true;
- } else if (ch == '\r') {
- fStart = start;
- fEnd = p;
- start = p + 1;
- state = State.CR;
- return true;
- }
- ++p;
- }
- }
- throw new IllegalStateException();
- }
-
- protected boolean readMore() throws IOException {
- if (start > 0) {
- System.arraycopy(buffer, start, buffer, 0, end - start);
- }
- end -= start;
- start = 0;
-
- if (end == buffer.length) {
- buffer = Arrays.copyOf(buffer, buffer.length + INCREMENT);
- }
-
- int n = in.read(buffer, end, buffer.length - end);
- if (n < 0) {
- return false;
- }
- end += n;
- return true;
- }
-
- public int getfStart() {
- return fStart;
- }
-
- public void setfStart(int fStart) {
- this.fStart = fStart;
- }
-
- public int getfEnd() {
- return fEnd;
- }
-
- public void setfEnd(int fEnd) {
- this.fEnd = fEnd;
- }
-
- public char[] getBuffer() {
- return buffer;
- }
- }
+ @Override
+ public IDataParser getDataParser() {
+ return dataParser;
+ }
}
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/IDataParser.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/IDataParser.java
new file mode 100644
index 0000000..f23aeac
--- /dev/null
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/IDataParser.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.runtime.operators.file;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.io.InputStream;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.om.types.ARecordType;
+
+/**
+ * Interface implemented by a data parser that converts the content of an input stream into ADM records.
+ */
+public interface IDataParser {
+
+ /**
+ * Initialize the parser prior to actual parsing.
+ *
+ * @param in
+ * input stream to be parsed
+ * @param recordType
+ * record type associated with input data
+ * @param datasetRec
+ * boolean flag set to true if input data represents dataset
+ * records.
+ * @throws AsterixException
+ * @throws IOException
+ */
+ public void initialize(InputStream in, ARecordType recordType, boolean datasetRec) throws AsterixException,
+ IOException;
+
+ /**
+ * Parse data from source input stream and output ADM records.
+ *
+     *            DataOutput instance used for writing the parser output.
+     * @return true if a record was parsed and written to the output; false if the end of the input was reached.
+ * @return
+ * @throws AsterixException
+ * @throws IOException
+ */
+ public boolean parse(DataOutput out) throws AsterixException, IOException;
+}
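A minimal sketch of the calling convention this interface implies, using only the two methods declared above: initialize once with the input stream and record type, then call parse repeatedly until it returns false. The driver class below is illustrative and not part of this patch.

package edu.uci.ics.asterix.runtime.operators.file;

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;

import edu.uci.ics.asterix.common.exceptions.AsterixException;
import edu.uci.ics.asterix.om.types.ARecordType;

// Illustrative driver: drains an IDataParser into a scratch buffer and
// returns the number of records it produced.
public class DataParserDriver {
    public static int drain(IDataParser parser, InputStream in, ARecordType recordType)
            throws AsterixException, IOException {
        ByteArrayOutputStream scratch = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(scratch);
        parser.initialize(in, recordType, true); // true: input represents dataset records
        int count = 0;
        while (parser.parse(out)) {              // each call writes one ADM record to 'out'
            count++;
        }
        return count;
    }
}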
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java
index cfe94ef..86fba12 100644
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java
+++ b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/operators/file/NtDelimitedDataTupleParserFactory.java
@@ -1,42 +1,34 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.asterix.runtime.operators.file;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-
-import edu.uci.ics.asterix.builders.IARecordBuilder;
-import edu.uci.ics.asterix.builders.RecordBuilder;
-import edu.uci.ics.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import edu.uci.ics.asterix.om.base.AMutableString;
-import edu.uci.ics.asterix.om.base.AString;
import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.asterix.om.types.ATypeTag;
-import edu.uci.ics.asterix.om.types.BuiltinType;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.util.ArrayBackedValueStorage;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParser;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+/**
+ * A tuple parser factory for creating a tuple parser capable of parsing
+ * delimited data.
+ */
public class NtDelimitedDataTupleParserFactory implements ITupleParserFactory {
private static final long serialVersionUID = 1L;
protected ARecordType recordType;
protected IValueParserFactory[] valueParserFactories;
protected char fieldDelimiter;
- @SuppressWarnings("unchecked")
- private ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
public NtDelimitedDataTupleParserFactory(ARecordType recordType, IValueParserFactory[] valueParserFactories,
char fieldDelimiter) {
@@ -47,271 +39,7 @@
@Override
public ITupleParser createTupleParser(final IHyracksTaskContext ctx) {
- return new ITupleParser() {
- @Override
- public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
- try {
- IValueParser[] valueParsers = new IValueParser[valueParserFactories.length];
- for (int i = 0; i < valueParserFactories.length; ++i) {
- valueParsers[i] = valueParserFactories[i].createValueParser();
- }
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- appender.reset(frame, true);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(1);
- DataOutput recDos = tb.getDataOutput();
-
- ArrayBackedValueStorage fieldValueBuffer = new ArrayBackedValueStorage();
- DataOutput fieldValueBufferOutput = fieldValueBuffer.getDataOutput();
- IARecordBuilder recBuilder = new RecordBuilder();
- recBuilder.reset(recordType);
- recBuilder.init();
-
- int n = recordType.getFieldNames().length;
- byte[] fieldTypeTags = new byte[n];
- for (int i = 0; i < n; i++) {
- ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
- fieldTypeTags[i] = tag.serialize();
- }
-
- int[] fldIds = new int[n];
- ArrayBackedValueStorage[] nameBuffers = new ArrayBackedValueStorage[n];
- AMutableString str = new AMutableString(null);
- for (int i = 0; i < n; i++) {
- String name = recordType.getFieldNames()[i];
- fldIds[i] = recBuilder.getFieldId(name);
- if (fldIds[i] < 0) {
- if (!recordType.isOpen()) {
- throw new HyracksDataException("Illegal field " + name + " in closed type "
- + recordType);
- } else {
- nameBuffers[i] = new ArrayBackedValueStorage();
- fieldNameToBytes(name, str, nameBuffers[i]);
- }
- }
- }
-
- FieldCursor cursor = new FieldCursor(new InputStreamReader(in));
- while (cursor.nextRecord()) {
- tb.reset();
- recBuilder.reset(recordType);
- recBuilder.init();
-
- for (int i = 0; i < valueParsers.length; ++i) {
- if (!cursor.nextField()) {
- break;
- }
- fieldValueBuffer.reset();
- fieldValueBufferOutput.writeByte(fieldTypeTags[i]);
- valueParsers[i].parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart,
- fieldValueBufferOutput);
- if (fldIds[i] < 0) {
- recBuilder.addField(nameBuffers[i], fieldValueBuffer);
- } else {
- recBuilder.addField(fldIds[i], fieldValueBuffer);
- }
- }
- recBuilder.write(recDos, true);
- tb.addFieldEndOffset();
-
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(frame, writer);
- appender.reset(frame, true);
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- }
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(frame, writer);
- }
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
- }
-
- private void fieldNameToBytes(String fieldName, AMutableString str, ArrayBackedValueStorage buffer)
- throws HyracksDataException {
- buffer.reset();
- DataOutput out = buffer.getDataOutput();
- str.setValue(fieldName);
- try {
- stringSerde.serialize(str, out);
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
- }
-
- };
- }
-
- private enum State {
- INIT,
- IN_RECORD,
- EOR,
- CR,
- EOF
- }
-
- private class FieldCursor {
- private static final int INITIAL_BUFFER_SIZE = 4096;
- private static final int INCREMENT = 4096;
-
- private final Reader in;
-
- private char[] buffer;
- private int start;
- private int end;
- private State state;
-
- private int fStart;
- private int fEnd;
-
- public FieldCursor(Reader in) {
- this.in = in;
- buffer = new char[INITIAL_BUFFER_SIZE];
- start = 0;
- end = 0;
- state = State.INIT;
- }
-
- public boolean nextRecord() throws IOException {
- while (true) {
- switch (state) {
- case INIT:
- boolean eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- } else {
- state = State.IN_RECORD;
- return true;
- }
-
- case IN_RECORD:
- int p = start;
- while (true) {
- if (p >= end) {
- int s = start;
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return start < end;
- }
- p -= (s - start);
- }
- char ch = buffer[p];
- if (ch == '\n') {
- start = p + 1;
- state = State.EOR;
- break;
- } else if (ch == '\r') {
- start = p + 1;
- state = State.CR;
- break;
- }
- ++p;
- }
- break;
-
- case CR:
- if (start >= end) {
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- }
- }
- char ch = buffer[start];
- if (ch == '\n') {
- ++start;
- state = State.EOR;
- } else {
- state = State.IN_RECORD;
- return true;
- }
-
- case EOR:
- if (start >= end) {
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return false;
- }
- }
- state = State.IN_RECORD;
- return start < end;
-
- case EOF:
- return false;
- }
- }
- }
-
- public boolean nextField() throws IOException {
- switch (state) {
- case INIT:
- case EOR:
- case EOF:
- case CR:
- return false;
-
- case IN_RECORD:
- boolean eof;
- int p = start;
- while (true) {
- if (p >= end) {
- int s = start;
- eof = !readMore();
- if (eof) {
- state = State.EOF;
- return true;
- }
- p -= (s - start);
- }
- char ch = buffer[p];
- if (ch == fieldDelimiter) {
- fStart = start;
- fEnd = p;
- start = p + 1;
- return true;
- } else if (ch == '\n') {
- fStart = start;
- fEnd = p;
- start = p + 1;
- state = State.EOR;
- return true;
- } else if (ch == '\r') {
- fStart = start;
- fEnd = p;
- start = p + 1;
- state = State.CR;
- return true;
- }
- ++p;
- }
- }
- throw new IllegalStateException();
- }
-
- private boolean readMore() throws IOException {
- if (start > 0) {
- System.arraycopy(buffer, start, buffer, 0, end - start);
- }
- end -= start;
- start = 0;
-
- if (end == buffer.length) {
- buffer = Arrays.copyOf(buffer, buffer.length + INCREMENT);
- }
-
- int n = in.read(buffer, end, buffer.length - end);
- if (n < 0) {
- return false;
- }
- end += n;
- return true;
- }
+ return new DelimitedDataTupleParser(ctx, recordType, valueParserFactories, fieldDelimiter);
}
}
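With the factory reduced to a one-liner, the wiring at the operator level looks roughly as follows. This is a sketch under assumptions: the value parser factories (IntegerParserFactory, UTF8StringParserFactory) are assumed to be the standard Hyracks parsers in hyracks-dataflow-common, and the record type, task context, input stream, and frame writer are supplied by the surrounding operator.

import java.io.InputStream;

import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.runtime.operators.file.NtDelimitedDataTupleParserFactory;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;

// Sketch: build a delimited-data tuple parser for a two-field record
// (an int field followed by a string field) and run it over an input stream.
public class DelimitedScanSketch {
    static void scan(IHyracksTaskContext ctx, ARecordType recordType, InputStream in, IFrameWriter writer)
            throws Exception {
        IValueParserFactory[] vpfs = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
                UTF8StringParserFactory.INSTANCE };
        ITupleParserFactory tpf = new NtDelimitedDataTupleParserFactory(recordType, vpfs, '|');
        ITupleParser parser = tpf.createTupleParser(ctx); // now returns a DelimitedDataTupleParser
        parser.parse(in, writer);                         // parsed frames are flushed to 'writer'
    }
}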
diff --git a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/AsterixRuntimeUtil.java b/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/AsterixRuntimeUtil.java
deleted file mode 100644
index ab57288..0000000
--- a/asterix-runtime/src/main/java/edu/uci/ics/asterix/runtime/util/AsterixRuntimeUtil.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright 2009-2011 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.runtime.util;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import edu.uci.ics.asterix.common.api.AsterixAppContextInfoImpl;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-
-public class AsterixRuntimeUtil {
-
- public static Set<String> getNodeControllersOnIP(String ipAddress) throws AsterixException {
- Map<String, Set<String>> nodeControllerInfo = AsterixAppContextInfoImpl.getNodeControllerMap();
- Set<String> nodeControllersAtLocation = nodeControllerInfo.get(ipAddress);
- return nodeControllersAtLocation;
- }
-
- public static Set<String> getNodeControllersOnHostName(String hostName) throws UnknownHostException {
- Map<String, Set<String>> nodeControllerInfo = AsterixAppContextInfoImpl.getNodeControllerMap();
- String address;
- address = InetAddress.getByName(hostName).getHostAddress();
- if (address.equals("127.0.1.1")) {
- address = "127.0.0.1";
- }
- Set<String> nodeControllersAtLocation = nodeControllerInfo.get(address);
- return nodeControllersAtLocation;
- }
-
- public static List<String> getAllNodeControllers() {
-
- Collection<Set<String>> nodeControllersCollection = AsterixAppContextInfoImpl.getNodeControllerMap().values();
- List<String> nodeControllers = new ArrayList<String>();
- for (Set<String> ncCollection : nodeControllersCollection) {
- nodeControllers.addAll(ncCollection);
- }
- return nodeControllers;
- }
-}
diff --git a/asterix-tools/pom.xml b/asterix-tools/pom.xml
index 5a8ea70..62176d9 100644
--- a/asterix-tools/pom.xml
+++ b/asterix-tools/pom.xml
@@ -1,13 +1,12 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>asterix</artifactId>
<groupId>edu.uci.ics.asterix</groupId>
<version>0.0.4-SNAPSHOT</version>
</parent>
- <groupId>edu.uci.ics.asterix</groupId>
<artifactId>asterix-tools</artifactId>
- <version>0.0.4-SNAPSHOT</version>
<build>
<plugins>
@@ -21,6 +20,29 @@
</configuration>
</plugin>
<plugin>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>aqlclient</id>
+ <goals>
+ <goal>jar</goal>
+ </goals>
+ <phase>package</phase>
+ <configuration>
+ <classifier>aqlclient</classifier>
+ <archive>
+ <manifest>
+ <mainClass>edu.uci.ics.asterix.tools.aqlclient.AqlClient</mainClass>
+ </manifest>
+ </archive>
+ <includes>
+ <include>**/uci/ics/asterix/tools/aqlclient/*</include>
+ </includes>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.7.2</version>
<executions>
@@ -112,6 +134,18 @@
<scope>compile</scope>
</dependency>
<dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ <version>4.2.2</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpcore</artifactId>
+ <version>4.2.2</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.8.1</version>
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/aqlclient/AqlClient.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/aqlclient/AqlClient.java
new file mode 100644
index 0000000..911b68b
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/aqlclient/AqlClient.java
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.asterix.tools.aqlclient;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.entity.UrlEncodedFormEntity;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.message.BasicNameValuePair;
+import org.apache.http.util.EntityUtils;
+
+/**
+ * This class automates the execution of AQL queries for benchmarking.
+ * The code was written by Pouria for the purpose of benchmarking ASTERIX.
+ */
+public class AqlClient {
+
+ static ArrayList<String> qFiles;
+ static ArrayList<String> qNames;
+
+ /*
+ * This code loads a set of AQL queries and runs them against ASTERIX. It
+ * runs the queries for a user-specified number of iterations. For each query
+ * it shows the Hyracks time, ASTERIX time, and client time on the screen,
+ * and also dumps the stats to an output file so they persist for future
+ * reference.
+ */
+ public static void main(String args[]) throws Exception {
+ /*
+ * Arguments:
+ * args[0] - Path to a file listing the query files; each AQL query is
+ *           saved in its own file, and each line of args[0] names one
+ *           such query file.
+ * args[1] - IP address of the CC.
+ * args[2] - Path to the output file for dumping stats.
+ * args[3] - Number of iterations to run the simulation.
+ * args[4] - Set to true to show stats on the console as queries run.
+ */
+ if (args.length != 5) {
+ System.out
+ .println("Usage: [0]=List-of-Query-Files, [1]=ccIPadr, [2]=outputFile, [3]=Iteration# , [4]=boolean-showOnConsole ");
+ return;
+ }
+ qFiles = new ArrayList<String>();
+ qNames = new ArrayList<String>();
+
+ loadQueriesPaths(args[0]);
+ String ccUrl = args[1];
+ String outputFile = args[2];
+ int iteration = Integer.parseInt(args[3]);
+ boolean showOnConsole = Boolean.parseBoolean(args[4]);
+
+ PrintWriter pw = new PrintWriter(new File(outputFile));
+ pw.println("Code\tStatus\tName\tHyracksTime\tAsterixTime\tClientTime\n");
+
+ DefaultHttpClient httpclient = new DefaultHttpClient();
+ HttpPost httpPost = new HttpPost("http://" + ccUrl + ":19001");
+ List<NameValuePair> nvps = new ArrayList<NameValuePair>();
+ nvps.add(new BasicNameValuePair("hyracks-port", "1098"));
+ nvps.add(new BasicNameValuePair("hyracks-ip", ccUrl));
+ nvps.add(new BasicNameValuePair("display-result", "true"));
+ nvps.add(new BasicNameValuePair("query", null)); // it will get its
+ // value in the loop
+ // below
+
+ int ixToremove = nvps.size() - 1;
+ try {
+ for (int i = 0; i < iteration; i++) {
+ for (int x = 0; x < qFiles.size(); x++) {
+ nvps.remove(ixToremove);
+ String query = readQueryFromFile(qFiles.get(x));
+ String qName = qNames.get(x);
+ System.out.println("\n\nNow Running Query " + qName + " in iteration " + i);
+ nvps.add(new BasicNameValuePair("query", query));
+
+ httpPost.setEntity(new UrlEncodedFormEntity(nvps));
+ long s = System.currentTimeMillis();
+ HttpResponse response = httpclient.execute(httpPost);
+ long e = System.currentTimeMillis();
+
+ String status = response.getStatusLine().toString();
+ HttpEntity entity = response.getEntity();
+ String content = EntityUtils.toString(entity);
+ EntityUtils.consume(entity);
+
+ double[] times = extractStats(content);
+ // Client time: the end-to-end delay from the client's perspective.
+ double endToend = ((double) (e - s)) / 1000.00;
+ pw.print(status + "\t" + qName + "\t" + times[0] + "\t" + times[1] + "\t" + endToend + "\n");
+
+ if (showOnConsole) {
+ // System.out.println("Iteration "+i+"\n"+content+"\n");
+ // //Uncomment this line, if you want to see the whole
+ // content of the returned HTML page
+ System.out.print(qName + "\t" + status + "\t" + times[0] + "\t" + times[1] + "\t" + endToend
+ + " (iteration " + (i) + ")\n");
+ }
+ }
+ pw.println();
+ }
+ } finally {
+ pw.close();
+ httpPost.releaseConnection();
+ }
+
+ }
+
+ // Assumption: It contains one query file path per line
+ private static void loadQueriesPaths(String qFilesPath) throws Exception {
+ BufferedReader in = new BufferedReader(new FileReader(qFilesPath));
+ String str;
+ while ((str = in.readLine()) != null) {
+ qFiles.add(str.trim());
+ int nameIx = str.lastIndexOf('/');
+ qNames.add(new String(str.substring(nameIx + 1)));
+ }
+ in.close();
+ }
+
+ private static String readQueryFromFile(String filePath) throws Exception {
+ BufferedReader in = new BufferedReader(new FileReader(filePath));
+ String query = "";
+ String str;
+ while ((str = in.readLine()) != null) {
+ query += str + "\n";
+ }
+ in.close();
+ return query;
+ }
+
+ private static double[] extractStats(String content) {
+ int hyracksTimeIx = content.indexOf("<PRE>Duration:");
+ if (hyracksTimeIx < 0) {
+ return new double[] { -1, -1 };
+ }
+ int endHyracksTimeIx = content.indexOf("</PRE>", hyracksTimeIx);
+ double hTime = Double.parseDouble(content.substring(hyracksTimeIx + 14, endHyracksTimeIx));
+
+ int totalTimeSIx = content.indexOf("Duration", endHyracksTimeIx);
+ if (totalTimeSIx < 0) {
+ return new double[] { hTime, -1 };
+ }
+ int totalTimeEIx = content.indexOf("\n", totalTimeSIx);
+ double tTime = Double.parseDouble(content.substring(totalTimeSIx + 10, totalTimeEIx));
+
+ return new double[] { hTime, tTime };
+ }
+
+}
\ No newline at end of file
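The stats reported by AqlClient are scraped from the response body by extractStats above. The standalone sketch below exercises the same string scanning on a synthetic body; the markers ("<PRE>Duration:", "</PRE>", a following "Duration: " line) are taken from the code, not from documented server output, so the synthetic body is only shaped the way the scraper expects.

// Illustrative only: replicates the extractStats logic on a synthetic response body.
public class ExtractStatsSketch {
    static double[] extractStats(String content) {
        int hIx = content.indexOf("<PRE>Duration:");
        if (hIx < 0) {
            return new double[] { -1, -1 };
        }
        int hEnd = content.indexOf("</PRE>", hIx);
        double hTime = Double.parseDouble(content.substring(hIx + 14, hEnd)); // skip "<PRE>Duration:"
        int tIx = content.indexOf("Duration", hEnd);
        if (tIx < 0) {
            return new double[] { hTime, -1 };
        }
        int tEnd = content.indexOf("\n", tIx);
        double tTime = Double.parseDouble(content.substring(tIx + 10, tEnd)); // skip "Duration: "
        return new double[] { hTime, tTime };
    }

    public static void main(String[] args) {
        String body = "<PRE>Duration: 1.25</PRE>\nTotal Duration: 3.5\n";
        double[] times = extractStats(body);
        System.out.println("hyracks=" + times[0] + " asterix=" + times[1]); // hyracks=1.25 asterix=3.5
    }
}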
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
new file mode 100644
index 0000000..84b989d
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
@@ -0,0 +1,279 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.dataset.adapter.FileSystemBasedAdapter;
+import edu.uci.ics.asterix.external.dataset.adapter.ITypedDatasourceAdapter;
+import edu.uci.ics.asterix.feed.managed.adapter.IManagedFeedAdapter;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.runtime.operators.file.ADMDataParser;
+import edu.uci.ics.asterix.runtime.operators.file.AbstractTupleParser;
+import edu.uci.ics.asterix.runtime.operators.file.DelimitedDataParser;
+import edu.uci.ics.asterix.runtime.operators.file.IDataParser;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParser;
+import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
+
+/**
+ * An adapter that simulates a feed from the contents of a source file. The file can be on the local file
+ * system or on HDFS. The feed ends when the content of the source file has been ingested.
+ */
+public class RateControlledFileSystemBasedAdapter extends FileSystemBasedAdapter implements ITypedDatasourceAdapter,
+ IManagedFeedAdapter {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final String KEY_FILE_SYSTEM = "fs";
+ public static final String LOCAL_FS = "localfs";
+ public static final String HDFS = "hdfs";
+
+ private final FileSystemBasedAdapter coreAdapter;
+ private final Map<String, String> configuration;
+ private final String fileSystem;
+ private final String format;
+
+ public RateControlledFileSystemBasedAdapter(ARecordType atype, Map<String, String> configuration) throws Exception {
+ super(atype);
+ checkRequiredArgs(configuration);
+ fileSystem = configuration.get(KEY_FILE_SYSTEM);
+ String adapterFactoryClass = null;
+ if (fileSystem.equalsIgnoreCase(LOCAL_FS)) {
+ adapterFactoryClass = "edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory";
+ } else if (fileSystem.equals(HDFS)) {
+ adapterFactoryClass = "edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory";
+ } else {
+ throw new AsterixException("Unsupported file system type " + fileSystem);
+ }
+ format = configuration.get(KEY_FORMAT);
+ IGenericDatasetAdapterFactory adapterFactory = (IGenericDatasetAdapterFactory) Class.forName(
+ adapterFactoryClass).newInstance();
+ coreAdapter = (FileSystemBasedAdapter) adapterFactory.createAdapter(configuration, atype);
+ this.configuration = configuration;
+ }
+
+ private void checkRequiredArgs(Map<String, String> configuration) throws Exception {
+ if (configuration.get(KEY_FILE_SYSTEM) == null) {
+ throw new Exception("File system type not specified. (fs=?) File system could be 'localfs' or 'hdfs'");
+ }
+ if (configuration.get(IGenericDatasetAdapterFactory.KEY_TYPE_NAME) == null) {
+ throw new Exception("Record type not specified (output-type-name=?)");
+ }
+ if (configuration.get(KEY_PATH) == null) {
+ throw new Exception("File path not specified (path=?)");
+ }
+ if (configuration.get(KEY_FORMAT) == null) {
+ throw new Exception("File format not specified (format=?)");
+ }
+ }
+
+ @Override
+ public InputStream getInputStream(int partition) throws IOException {
+ return coreAdapter.getInputStream(partition);
+ }
+
+ @Override
+ public void initialize(IHyracksTaskContext ctx) throws Exception {
+ coreAdapter.initialize(ctx);
+ this.ctx = ctx;
+ }
+
+ @Override
+ public void configure(Map<String, String> arguments) throws Exception {
+ coreAdapter.configure(arguments);
+ }
+
+ @Override
+ public AdapterType getAdapterType() {
+ return coreAdapter.getAdapterType();
+ }
+
+ @Override
+ protected ITupleParser getTupleParser() throws Exception {
+ ITupleParser parser = null;
+ if (format.equals(FORMAT_DELIMITED_TEXT)) {
+ parser = getRateControlledDelimitedDataTupleParser((ARecordType) atype);
+ } else if (format.equals(FORMAT_ADM)) {
+ parser = getRateControlledADMDataTupleParser((ARecordType) atype);
+ } else {
+ throw new IllegalArgumentException(" format " + configuration.get(KEY_FORMAT) + " not supported");
+ }
+ return parser;
+
+ }
+
+ protected ITupleParser getRateControlledDelimitedDataTupleParser(ARecordType recordType) throws AsterixException {
+ ITupleParser parser;
+ int n = recordType.getFieldTypes().length;
+ IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
+ for (int i = 0; i < n; i++) {
+ ATypeTag tag = recordType.getFieldTypes()[i].getTypeTag();
+ IValueParserFactory vpf = typeToValueParserFactMap.get(tag);
+ if (vpf == null) {
+ throw new NotImplementedException("No value parser factory for delimited fields of type " + tag);
+ }
+ fieldParserFactories[i] = vpf;
+
+ }
+ String delimiterValue = (String) configuration.get(KEY_DELIMITER);
+ if (delimiterValue != null && delimiterValue.length() > 1) {
+ throw new AsterixException("improper delimiter");
+ }
+
+ Character delimiter = delimiterValue.charAt(0);
+ parser = new RateControlledTupleParserFactory(recordType, fieldParserFactories, delimiter, configuration)
+ .createTupleParser(ctx);
+ return parser;
+ }
+
+ protected ITupleParser getRateControlledADMDataTupleParser(ARecordType recordType) throws AsterixException {
+ ITupleParser parser = null;
+ try {
+ parser = new RateControlledTupleParserFactory(recordType, configuration).createTupleParser(ctx);
+ return parser;
+ } catch (Exception e) {
+ throw new AsterixException(e);
+ }
+
+ }
+
+ @Override
+ public ARecordType getAdapterOutputType() {
+ return (ARecordType) atype;
+ }
+
+ @Override
+ public void alter(Map<String, String> properties) {
+ ((RateControlledTupleParser) parser).setInterTupleInterval(Long.parseLong(properties
+ .get(RateControlledTupleParser.INTER_TUPLE_INTERVAL)));
+ }
+
+ @Override
+ public void stop() {
+ ((RateControlledTupleParser) parser).stop();
+ }
+
+ @Override
+ public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+ return coreAdapter.getPartitionConstraint();
+ }
+}
+
+class RateControlledTupleParserFactory implements ITupleParserFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ private final ARecordType recordType;
+ private final IDataParser dataParser;
+ private final Map<String, String> configuration;
+
+ public RateControlledTupleParserFactory(ARecordType recordType, IValueParserFactory[] valueParserFactories,
+ char fieldDelimiter, Map<String, String> configuration) {
+ this.recordType = recordType;
+ dataParser = new DelimitedDataParser(recordType, valueParserFactories, fieldDelimiter);
+ this.configuration = configuration;
+ }
+
+ public RateControlledTupleParserFactory(ARecordType recordType, Map<String, String> configuration) {
+ this.recordType = recordType;
+ dataParser = new ADMDataParser();
+ this.configuration = configuration;
+ }
+
+ @Override
+ public ITupleParser createTupleParser(IHyracksTaskContext ctx) {
+ return new RateControlledTupleParser(ctx, recordType, dataParser, configuration);
+ }
+
+}
+
+class RateControlledTupleParser extends AbstractTupleParser {
+
+ private final IDataParser dataParser;
+ private long interTupleInterval;
+ private boolean delayConfigured;
+ private boolean continueIngestion = true;
+
+ public static final String INTER_TUPLE_INTERVAL = "tuple-interval";
+
+ public RateControlledTupleParser(IHyracksTaskContext ctx, ARecordType recType, IDataParser dataParser,
+ Map<String, String> configuration) {
+ super(ctx, recType);
+ this.dataParser = dataParser;
+ String propValue = configuration.get(INTER_TUPLE_INTERVAL);
+ if (propValue != null) {
+ interTupleInterval = Long.parseLong(propValue);
+ } else {
+ interTupleInterval = 0;
+ }
+ delayConfigured = interTupleInterval != 0;
+ }
+
+ public void setInterTupleInterval(long val) {
+ this.interTupleInterval = val;
+ this.delayConfigured = val > 0;
+ }
+
+ public void stop() {
+ continueIngestion = false;
+ }
+
+ @Override
+ public IDataParser getDataParser() {
+ return dataParser;
+ }
+
+ @Override
+ public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
+
+ appender.reset(frame, true);
+ IDataParser parser = getDataParser();
+ try {
+ parser.initialize(in, recType, true);
+ while (continueIngestion) {
+ tb.reset();
+ if (!parser.parse(tb.getDataOutput())) {
+ break;
+ }
+ tb.addFieldEndOffset();
+ if (delayConfigured) {
+ Thread.sleep(interTupleInterval);
+ }
+ addTupleToFrame(writer);
+ }
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(frame, writer);
+ }
+ } catch (AsterixException ae) {
+ throw new HyracksDataException(ae);
+ } catch (IOException ioe) {
+ throw new HyracksDataException(ioe);
+ } catch (InterruptedException ie) {
+ throw new HyracksDataException(ie);
+ }
+ }
+}
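RateControlledTupleParser throttles ingestion simply by sleeping for the configured tuple-interval (in milliseconds) between tuples. The self-contained sketch below shows the same throttling pattern outside of the Hyracks machinery; class and variable names are illustrative.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

// Illustrative only: emit one record per interTupleInterval milliseconds,
// mirroring the delay RateControlledTupleParser inserts between tuples.
public class ThrottledIngestSketch {
    public static void main(String[] args) throws IOException, InterruptedException {
        long interTupleInterval = 100;                   // ms between tuples ("tuple-interval")
        boolean delayConfigured = interTupleInterval != 0;
        BufferedReader in = new BufferedReader(new StringReader("rec1\nrec2\nrec3\n"));
        String record;
        while ((record = in.readLine()) != null) {       // loop until the input is exhausted
            if (delayConfigured) {
                Thread.sleep(interTupleInterval);        // rate control
            }
            System.out.println("ingested: " + record);   // stands in for addTupleToFrame(writer)
        }
    }
}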
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
new file mode 100644
index 0000000..6c32acb
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.external.adapter.factory.IGenericDatasetAdapterFactory;
+import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.IAType;
+
+/**
+ * Factory class for creating @see{RateControllerFileSystemBasedAdapter} The
+ * adapter simulates a feed from the contents of a source file. The file can be
+ * on the local file system or on HDFS. The feed ends when the content of the
+ * source file has been ingested.
+ */
+public class RateControlledFileSystemBasedAdapterFactory implements IGenericDatasetAdapterFactory {
+
+ @Override
+ public IDatasourceAdapter createAdapter(Map<String, String> configuration, IAType type) throws Exception {
+ return new RateControlledFileSystemBasedAdapter((ARecordType) type, configuration);
+ }
+
+ @Override
+ public String getName() {
+ return "file_feed";
+ }
+
+}
\ No newline at end of file
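Putting the two new classes together, the adapter is configured through a plain key/value map. The keys below are taken from checkRequiredArgs and INTER_TUPLE_INTERVAL in this patch; the path, the record type name, and the literal value for the format key ("adm") are placeholders/assumptions, as is the record type supplied to the factory.

import java.util.HashMap;
import java.util.Map;

import edu.uci.ics.asterix.external.dataset.adapter.IDatasourceAdapter;
import edu.uci.ics.asterix.om.types.ARecordType;
import edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory;

// Sketch: configuration map for the rate-controlled "file_feed" adapter.
public class FileFeedConfigSketch {
    static IDatasourceAdapter createFeedAdapter(ARecordType recordType) throws Exception {
        Map<String, String> config = new HashMap<String, String>();
        config.put("fs", "localfs");                  // KEY_FILE_SYSTEM: 'localfs' or 'hdfs'
        config.put("path", "nc1://data/records.adm"); // placeholder path (KEY_PATH)
        config.put("format", "adm");                  // assumed literal for the ADM format (KEY_FORMAT)
        config.put("output-type-name", "RecordType"); // placeholder record type name
        config.put("tuple-interval", "10");           // optional: ms between tuples (INTER_TUPLE_INTERVAL)
        return new RateControlledFileSystemBasedAdapterFactory().createAdapter(config, recordType);
    }
}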
diff --git a/asterix-transactions/pom.xml b/asterix-transactions/pom.xml
index c440c2a..7db5cd9 100644
--- a/asterix-transactions/pom.xml
+++ b/asterix-transactions/pom.xml
@@ -28,9 +28,6 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-common</artifactId>
- <version>0.2.2-SNAPSHOT</version>
- <type>jar</type>
- <scope>compile</scope>
</dependency>
</dependencies>
</project>
diff --git a/pom.xml b/pom.xml
index 506d57d..5080813 100644
--- a/pom.xml
+++ b/pom.xml
@@ -20,6 +20,11 @@
</plugins>
</build>
+ <properties>
+ <algebricks.version>0.2.3-SNAPSHOT</algebricks.version>
+ <hyracks.version>0.2.3-SNAPSHOT</hyracks.version>
+ </properties>
+
<scm>
<connection>scm:svn:https://grape.ics.uci.edu/svn/asterix/trunk/asterix</connection>
<developerConnection>scm:svn:https://grape.ics.uci.edu/svn/asterix/trunk/asterix</developerConnection>
@@ -137,4 +142,65 @@
<optional>true</optional>
</dependency>
</dependencies>
+ <dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>algebricks-compiler</artifactId>
+ <version>${algebricks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-dataflow-std</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-control-cc</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-control-nc</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-server</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-cli</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-dataflow-hadoop</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-btree</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-rtree</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-invertedindex</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-common</artifactId>
+ <version>${hyracks.version}</version>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
</project>